ansible-playbook [core 2.17.7] config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.12/site-packages/ansible ansible collection location = /tmp/collections-Ij3 executable location = /usr/local/bin/ansible-playbook python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12) jinja version = 3.1.4 libyaml = True No config file found; using defaults running playbook inside collection fedora.linux_system_roles redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_quadlet_demo.yml *********************************************** 2 plays in /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5 Tuesday 07 January 2025 09:13:27 -0500 (0:00:00.008) 0:00:00.008 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-iXu/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Deploy the quadlet demo app] ********************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 Tuesday 07 January 2025 09:13:27 -0500 (0:00:00.026) 0:00:00.034 ******* [WARNING]: Platform linux on host managed-node2 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.17/reference_appendices/interpreter_discovery.html for more information. 
ok: [managed-node2] TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Tuesday 07 January 2025 09:13:28 -0500 (0:00:01.367) 0:00:01.402 ******* skipping: [managed-node2] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" } TASK [End test] **************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45 Tuesday 07 January 2025 09:13:28 -0500 (0:00:00.042) 0:00:01.444 ******* META: end_play conditional evaluated to False, continuing play skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Generate certificates] *************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Tuesday 07 January 2025 09:13:28 -0500 (0:00:00.032) 0:00:01.476 ******* included: fedora.linux_system_roles.certificate for managed-node2 TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Tuesday 07 January 2025 09:13:28 -0500 (0:00:00.114) 0:00:01.590 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Tuesday 07 January 2025 09:13:28 -0500 (0:00:00.055) 0:00:01.646 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Tuesday 07 January 2025 09:13:28 -0500 (0:00:00.073) 0:00:01.719 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Tuesday 07 January 2025 09:13:29 -0500 (0:00:00.578) 0:00:02.298 ******* ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19 Tuesday 07 January 2025 09:13:29 -0500 (0:00:00.092) 0:00:02.391 ******* skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { 
"__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Tuesday 07 January 2025 09:13:29 -0500 (0:00:00.118) 0:00:02.509 ******* changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-pyasn1-0.6.1-1.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch" ] } TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Tuesday 07 January 2025 09:13:32 -0500 (0:00:02.645) 0:00:05.155 ******* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nss-sysinit-3.101.0-13.el10.x86_64", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64" ] } TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Tuesday 07 January 2025 09:13:35 -0500 (0:00:03.310) 0:00:08.465 ******* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Tuesday 07 January 2025 09:13:36 -0500 (0:00:00.598) 0:00:09.064 ******* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Tuesday 07 January 2025 09:13:36 -0500 (0:00:00.440) 0:00:09.504 ******* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "syslog.target network.target basic.target dbus.socket sysinit.target system.slice systemd-journald.socket dbus-broker.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3095617536", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Tuesday 07 January 2025 09:13:37 -0500 (0:00:01.212) 0:00:10.717 ******* changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). 
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Tuesday 07 January 2025 09:13:38 -0500 (0:00:01.016) 0:00:11.734 ******* ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQU5namRkOVJPa3BCZ25LejErNTJhaG93RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJRNApNak0zTldSbUxUVXhNMkUwWVRReExUZ3lOekppTTJRM0xXVmxOelkyWVRFNU1CNFhEVEkxTURFd056RTBNVE16Ck9Gb1hEVEkyTURFd056RTBNVE16TjFvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFuNHFMUklwTjJmVU1BcUc4eGNKT21kZVNsR3RvdjdNVApHZFpzK0lRMUdzSkh4Zm01KzJUR3ZHb3NvUnBkd3duOWJCdU5yb0hrUDFXUW9MVnJGMVJ6NnlKdEoxRThzY3ROCnlvRmZpbmJxQkxmOGZQTm5iLzhJVWVGN1AzbDBGOGRWODNvTXVKR0srSVp2dE5VbkRJZmlqTXQ2RVlRS0JUNUoKNExKK0gwL2Ixc1pHUURyaCtoMUZEUXhrRk53aXMxRlVxZEdJTndVZHJTSkRxTncwSmdRVUh4TW4xUCtHemVPOApnMDNEVHROejdxOW8xZDRjNkJTcStCZzMrRHV1SndRRUd2MmJXaW5HdjgzditQb28wQUxsaXdjc0QxQlNpTXRWCis3K2VMR2wreS9TZFhEQ054RzFxQWF5NUQ0UE9oY2NvcUhhVWNnSVFqVERSWENEUFhjZ1U2d0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUE9hClNzWHVZaXVDbS9wQmRzUXIwb2htOU5HN01COEdBMVVkSXdRWU1CYUFGRytTQnBjL1YrT1F2SjBHRnJqbGtTTkQKZE1sS01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQjE1S0s2N2RuMEFPSVlmR1JZbU1pTTN0YjRaS2JtMFlrNgpyM0dGa2dwWlJVaGFuUVRlcXRhMFFhYk1VbW9wdXdNZXUyWFcySmU5bTBwTG1SK1hrRVgycjFSTGpibytidGorCkJTVElMRkQ0TjA5YzdKKzVSZENESnNKR2U0c0UzTmpLaEdTV1k5Ym9Mb1kzbXpxcVY5U1JSV3gwMlBGSDdGMXMKaFVITGZFV201WjcyWk1Lc0h6MWRGUnA4RlNhZ21FRHN1c3gzcXRwa3pZSkN3bEg1SzhEU094SXBjQVpUaXFxSQppUWRrM0laa2tnNjN2bUFDbjJqNzkxQStudm1nWXl0Uy91eWtIMjIzV3FjbzJ4eTdObFVpTUNzTnlCZFY0RmxiCnFkRTJUcG4wMUkvVXQzYnY4bjdNak0yV3ZSUVIxU01vZC9KVHBSTkRmcTBwSDNSb3oyTWoKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ2Zpb3RFaWszWjlRd0MKb2J6RndrNloxNUtVYTJpL3N4TVoxbXo0aERVYXdrZkYrYm43Wk1hOGFpeWhHbDNEQ2Yxc0c0MnVnZVEvVlpDZwp0V3NYVkhQckltMG5VVHl4eTAzS2dWK0tkdW9FdC94ODgyZHYvd2hSNFhzL2VYUVh4MVh6ZWd5NGtZcjRobSswCjFTY01oK0tNeTNvUmhBb0ZQa25nc240ZlQ5dld4a1pBT3VINkhVVU5ER1FVM0NLelVWU3AwWWczQlIydElrT28KM0RRbUJCUWZFeWZVLzRiTjQ3eURUY05PMDNQdXIyalYzaHpvRktyNEdEZjRPNjRuQkFRYS9adGFLY2EvemUvNAoraWpRQXVXTEJ5d1BVRktJeTFYN3Y1NHNhWDdMOUoxY01JM0ViV29CckxrUGc4NkZ4eWlvZHBSeUFoQ05NTkZjCklNOWR5QlRyQWdNQkFBRUNnZ0VBSzJxOXFjSmxtMW9Ia2JQSU5wMUJYTGFBeW1sTWtWYS9iUG1nb0NVQ2VxekMKY1lzYlpPR3cvOWQ1RzhLY0g1YlpZNUZBQm15TTIvdUNKMm9Yb2JxMjdSZHFVeGZWaDZFSEk0UC9weVBqWGJOZwpQL1RSZ25ZS0lwWlZQeG5qVVJGTFA3NUlOZlpOdG9LSFpkUFlFNTNyTmx1dGRaVFVVVWI1UGUvMVNMb24zcE9hCm41Mi9XcXBHMFpVOWdGU1Y3THFpc0J3TC9iMU83cHhxUXhiYVhYb2FuNm0yUStTbGZvOUFTTGJmb2FiWDJhSHkKQndITnQ1bUtEUDFDdDhybGlpYmJ0WmFOMUk4WTJhU0pPckxCcnlzUDB5R2M2c1NLc2J4ZmRaT1hKNG8wcHA3aApscGdvMVBOc2NZSnF0MFlGQVZFZVJhZHloQjFHU2pHOGttWUhhV3E0SVFLQmdRRFY5dFFBUkZNNVlQeWJvZnp6CkY0cm5NKzZuVHQ4YzlyTTlCN20rSC9Fbm5aRHdLWjRFZVVreVdkd2krSStsVUlPbW5vUWFLaExJSENBNUpEankKSEcyTDRuWC8wNmExV1FicUdtK1BoQUQrd2hSQ2s1UzdQS2ZqaWgvSVN0a0psMzRMRWpBY3BOMjFjTElMWlI5Rgp4MDFvSGVaUEtaa2RVUm94SlJlbU9zUW5td0tCZ1FDKzRvMDA3WEc0UC9aY2VLUUxickkySVZpZkRmNi85cnZWCnU2Ly9VQ1dySUsvK0pTVTdZRzlTbVV6OWpxN090aVVMeEFrZDNYZWJUL1VTU29rWWZsVENpZE5yRkdvb2c4dVcKMm9seDdPNTNjSlNpdHJFTWRxZElyTTNxUDAzRWNMM1NHV2RFTWI1ek16amliMEpFZy84WEo4VTRKOGxxalJZawpJNGppTGZjazhRS0JnUUN1RDkzSjFjdXFteWNGYVZPQll6Nmg5aEtKVTFXWWwyMmJFYVRlY2RmYWRZbUtoeURBCjNQWmFHdFpHTU5XVEZCdjF1WEVZWSsvaGtWZjVoR2s3TlRzOUdGOE1YdDk1R2xMUXNlN2Qzdko4VGVEemhHSHoKckZXY2V5cDk0NzRLMm80MGJYaHNaUWJrYXlpOHNtZ3RlUVNVMWZZZG9tYW9mZWtEMy93TVJEUjlUd0tCZ0I0Sgp0cURTa0tlbTRSVFNJelp0Vi92dXFXeDJIbFdFOVRBQUZnVCtUcjhzS0tXOG9LeEVVNkJTNFd1eExVNzRHTUpPCk9nVUtHYzFGK3RIREN1bmd6MlBCbnBSbUpRSjFJdTk5T2szRU5BNTFsOTQzRTR0YmpKMkNHL0UxdnY0Zm82OVIKTWcvb0tBa20wVmZJbkV2OTNZRitUNm16a1IyRVVDeWxydCt3emdoeEFvR0FDejQxY3RsYmc5SUQwREhsb1BpdAp3aWRLQkM5QVZvTUZLNFVGQXF0ZFcrT0o3RktOalRGTGltQkp5WUdFOFRHWmJZWm9CdTc1VHpvWTcrZmxoeTRMCnBSbVVtRW9TRHVtR1MvU0REUzRJS1hLZi9YWSsrc0d4QnNCbFE3UEhWNDRFN3V6SVllQ20zRFh1bjVFcFowNTYKY2NIdVowL096a3ZYZ29LcVB0RmtySlU9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQU5namRkOVJPa3BCZ25LejErNTJhaG93RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJRNApNak0zTldSbUxUVXhNMkUwWVRReExUZ3lOekppTTJRM0xXVmxOelkyWVRFNU1CNFhEVEkxTURFd056RTBNVE16Ck9Gb1hEVEkyTURFd056RTBNVE16TjFvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFuNHFMUklwTjJmVU1BcUc4eGNKT21kZVNsR3RvdjdNVApHZFpzK0lRMUdzSkh4Zm01KzJUR3ZHb3NvUnBkd3duOWJCdU5yb0hrUDFXUW9MVnJGMVJ6NnlKdEoxRThzY3ROCnlvRmZpbmJxQkxmOGZQTm5iLzhJVWVGN1AzbDBGOGRWODNvTXVKR0srSVp2dE5VbkRJZmlqTXQ2RVlRS0JUNUoKNExKK0gwL2Ixc1pHUURyaCtoMUZEUXhrRk53aXMxRlVxZEdJTndVZHJTSkRxTncwSmdRVUh4TW4xUCtHemVPOApnMDNEVHROejdxOW8xZDRjNkJTcStCZzMrRHV1SndRRUd2MmJXaW5HdjgzditQb28wQUxsaXdjc0QxQlNpTXRWCis3K2VMR2wreS9TZFhEQ054RzFxQWF5NUQ0UE9oY2NvcUhhVWNnSVFqVERSWENEUFhjZ1U2d0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUE9hClNzWHVZaXVDbS9wQmRzUXIwb2htOU5HN01COEdBMVVkSXdRWU1CYUFGRytTQnBjL1YrT1F2SjBHRnJqbGtTTkQKZE1sS01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQjE1S0s2N2RuMEFPSVlmR1JZbU1pTTN0YjRaS2JtMFlrNgpyM0dGa2dwWlJVaGFuUVRlcXRhMFFhYk1VbW9wdXdNZXUyWFcySmU5bTBwTG1SK1hrRVgycjFSTGpibytidGorCkJTVElMRkQ0TjA5YzdKKzVSZENESnNKR2U0c0UzTmpLaEdTV1k5Ym9Mb1kzbXpxcVY5U1JSV3gwMlBGSDdGMXMKaFVITGZFV201WjcyWk1Lc0h6MWRGUnA4RlNhZ21FRHN1c3gzcXRwa3pZSkN3bEg1SzhEU094SXBjQVpUaXFxSQppUWRrM0laa2tnNjN2bUFDbjJqNzkxQStudm1nWXl0Uy91eWtIMjIzV3FjbzJ4eTdObFVpTUNzTnlCZFY0RmxiCnFkRTJUcG4wMUkvVXQzYnY4bjdNak0yV3ZSUVIxU01vZC9KVHBSTkRmcTBwSDNSb3oyTWoKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Tuesday 07 January 2025 09:13:40 -0500 (0:00:01.216) 0:00:12.950 ******* ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCfiotEik3Z9QwC\nobzFwk6Z15KUa2i/sxMZ1mz4hDUawkfF+bn7ZMa8aiyhGl3DCf1sG42ugeQ/VZCg\ntWsXVHPrIm0nUTyxy03KgV+KduoEt/x882dv/whR4Xs/eXQXx1Xzegy4kYr4hm+0\n1ScMh+KMy3oRhAoFPkngsn4fT9vWxkZAOuH6HUUNDGQU3CKzUVSp0Yg3BR2tIkOo\n3DQmBBQfEyfU/4bN47yDTcNO03Pur2jV3hzoFKr4GDf4O64nBAQa/ZtaKca/ze/4\n+ijQAuWLBywPUFKIy1X7v54saX7L9J1cMI3EbWoBrLkPg86FxyiodpRyAhCNMNFc\nIM9dyBTrAgMBAAECggEAK2q9qcJlm1oHkbPINp1BXLaAymlMkVa/bPmgoCUCeqzC\ncYsbZOGw/9d5G8KcH5bZY5FABmyM2/uCJ2oXobq27RdqUxfVh6EHI4P/pyPjXbNg\nP/TRgnYKIpZVPxnjURFLP75INfZNtoKHZdPYE53rNlutdZTUUUb5Pe/1SLon3pOa\nn52/WqpG0ZU9gFSV7LqisBwL/b1O7pxqQxbaXXoan6m2Q+Slfo9ASLbfoabX2aHy\nBwHNt5mKDP1Ct8rliibbtZaN1I8Y2aSJOrLBrysP0yGc6sSKsbxfdZOXJ4o0pp7h\nlpgo1PNscYJqt0YFAVEeRadyhB1GSjG8kmYHaWq4IQKBgQDV9tQARFM5YPybofzz\nF4rnM+6nTt8c9rM9B7m+H/EnnZDwKZ4EeUkyWdwi+I+lUIOmnoQaKhLIHCA5JDjy\nHG2L4nX/06a1WQbqGm+PhAD+whRCk5S7PKfjih/IStkJl34LEjAcpN21cLILZR9F\nx01oHeZPKZkdURoxJRemOsQnmwKBgQC+4o007XG4P/ZceKQLbrI2IVifDf6/9rvV\nu6//UCWrIK/+JSU7YG9SmUz9jq7OtiULxAkd3XebT/USSokYflTCidNrFGoog8uW\n2olx7O53cJSitrEMdqdIrM3qP03EcL3SGWdEMb5zMzjib0JEg/8XJ8U4J8lqjRYk\nI4jiLfck8QKBgQCuD93J1cuqmycFaVOBYz6h9hKJU1WYl22bEaTecdfadYmKhyDA\n3PZaGtZGMNWTFBv1uXEYY+/hkVf5hGk7NTs9GF8MXt95GlLQse7d3vJ8TeDzhGHz\nrFWceyp9474K2o40bXhsZQbkayi8smgteQSU1fYdomaofekD3/wMRDR9TwKBgB4J\ntqDSkKem4RTSIzZtV/vuqWx2HlWE9TAAFgT+Tr8sKKW8oKxEU6BS4WuxLU74GMJO\nOgUKGc1F+tHDCungz2PBnpRmJQJ1Iu99Ok3ENA51l943E4tbjJ2CG/E1vv4fo69R\nMg/oKAkm0VfInEv93YF+T6mzkR2EUCylrt+wzghxAoGACz41ctlbg9ID0DHloPit\nwidKBC9AVoMFK4UFAqtdW+OJ7FKNjTFLimBJyYGE8TGZbYZoBu75TzoY7+flhy4L\npRmUmEoSDumGS/SDDS4IKXKf/XY++sGxBsBlQ7PHV44E7uzIYeCm3DXun5EpZ056\nccHuZ0/OzkvXgoKqPtFkrJU=\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Tuesday 07 January 2025 09:13:40 -0500 (0:00:00.051) 0:00:13.002 ******* ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCfiotEik3Z9QwC\nobzFwk6Z15KUa2i/sxMZ1mz4hDUawkfF+bn7ZMa8aiyhGl3DCf1sG42ugeQ/VZCg\ntWsXVHPrIm0nUTyxy03KgV+KduoEt/x882dv/whR4Xs/eXQXx1Xzegy4kYr4hm+0\n1ScMh+KMy3oRhAoFPkngsn4fT9vWxkZAOuH6HUUNDGQU3CKzUVSp0Yg3BR2tIkOo\n3DQmBBQfEyfU/4bN47yDTcNO03Pur2jV3hzoFKr4GDf4O64nBAQa/ZtaKca/ze/4\n+ijQAuWLBywPUFKIy1X7v54saX7L9J1cMI3EbWoBrLkPg86FxyiodpRyAhCNMNFc\nIM9dyBTrAgMBAAECggEAK2q9qcJlm1oHkbPINp1BXLaAymlMkVa/bPmgoCUCeqzC\ncYsbZOGw/9d5G8KcH5bZY5FABmyM2/uCJ2oXobq27RdqUxfVh6EHI4P/pyPjXbNg\nP/TRgnYKIpZVPxnjURFLP75INfZNtoKHZdPYE53rNlutdZTUUUb5Pe/1SLon3pOa\nn52/WqpG0ZU9gFSV7LqisBwL/b1O7pxqQxbaXXoan6m2Q+Slfo9ASLbfoabX2aHy\nBwHNt5mKDP1Ct8rliibbtZaN1I8Y2aSJOrLBrysP0yGc6sSKsbxfdZOXJ4o0pp7h\nlpgo1PNscYJqt0YFAVEeRadyhB1GSjG8kmYHaWq4IQKBgQDV9tQARFM5YPybofzz\nF4rnM+6nTt8c9rM9B7m+H/EnnZDwKZ4EeUkyWdwi+I+lUIOmnoQaKhLIHCA5JDjy\nHG2L4nX/06a1WQbqGm+PhAD+whRCk5S7PKfjih/IStkJl34LEjAcpN21cLILZR9F\nx01oHeZPKZkdURoxJRemOsQnmwKBgQC+4o007XG4P/ZceKQLbrI2IVifDf6/9rvV\nu6//UCWrIK/+JSU7YG9SmUz9jq7OtiULxAkd3XebT/USSokYflTCidNrFGoog8uW\n2olx7O53cJSitrEMdqdIrM3qP03EcL3SGWdEMb5zMzjib0JEg/8XJ8U4J8lqjRYk\nI4jiLfck8QKBgQCuD93J1cuqmycFaVOBYz6h9hKJU1WYl22bEaTecdfadYmKhyDA\n3PZaGtZGMNWTFBv1uXEYY+/hkVf5hGk7NTs9GF8MXt95GlLQse7d3vJ8TeDzhGHz\nrFWceyp9474K2o40bXhsZQbkayi8smgteQSU1fYdomaofekD3/wMRDR9TwKBgB4J\ntqDSkKem4RTSIzZtV/vuqWx2HlWE9TAAFgT+Tr8sKKW8oKxEU6BS4WuxLU74GMJO\nOgUKGc1F+tHDCungz2PBnpRmJQJ1Iu99Ok3ENA51l943E4tbjJ2CG/E1vv4fo69R\nMg/oKAkm0VfInEv93YF+T6mzkR2EUCylrt+wzghxAoGACz41ctlbg9ID0DHloPit\nwidKBC9AVoMFK4UFAqtdW+OJ7FKNjTFLimBJyYGE8TGZbYZoBu75TzoY7+flhy4L\npRmUmEoSDumGS/SDDS4IKXKf/XY++sGxBsBlQ7PHV44E7uzIYeCm3DXun5EpZ056\nccHuZ0/OzkvXgoKqPtFkrJU=\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.026881", "end": "2025-01-07 09:13:40.494213", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRANgjdd9ROkpBgnKz1+52ahowDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Q4\nMjM3NWRmLTUxM2E0YTQxLTgyNzJiM2Q3LWVlNzY2YTE5MB4XDTI1MDEwNzE0MTMz\nOFoXDTI2MDEwNzE0MTMzN1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAn4qLRIpN2fUMAqG8xcJOmdeSlGtov7MT\nGdZs+IQ1GsJHxfm5+2TGvGosoRpdwwn9bBuNroHkP1WQoLVrF1Rz6yJtJ1E8sctN\nyoFfinbqBLf8fPNnb/8IUeF7P3l0F8dV83oMuJGK+IZvtNUnDIfijMt6EYQKBT5J\n4LJ+H0/b1sZGQDrh+h1FDQxkFNwis1FUqdGINwUdrSJDqNw0JgQUHxMn1P+GzeO8\ng03DTtNz7q9o1d4c6BSq+Bg3+DuuJwQEGv2bWinGv83v+Poo0ALliwcsD1BSiMtV\n+7+eLGl+y/SdXDCNxG1qAay5D4POhccoqHaUcgIQjTDRXCDPXcgU6wIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPOa\nSsXuYiuCm/pBdsQr0ohm9NG7MB8GA1UdIwQYMBaAFG+SBpc/V+OQvJ0GFrjlkSND\ndMlKMA0GCSqGSIb3DQEBCwUAA4IBAQB15KK67dn0AOIYfGRYmMiM3tb4ZKbm0Yk6\nr3GFkgpZRUhanQTeqta0QabMUmopuwMeu2XW2Je9m0pLmR+XkEX2r1RLjbo+btj+\nBSTILFD4N09c7J+5RdCDJsJGe4sE3NjKhGSWY9boLoY3mzqqV9SRRWx02PFH7F1s\nhUHLfEWm5Z72ZMKsHz1dFRp8FSagmEDsusx3qtpkzYJCwlH5K8DSOxIpcAZTiqqI\niQdk3IZkkg63vmACn2j791A+nvmgYytS/uykH223Wqco2xy7NlUiMCsNyBdV4Flb\nqdE2Tpn01I/Ut3bv8n7MjM2WvRQR1SMod/JTpRNDfq0pH3Roz2Mj\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCfiotEik3Z9QwC\nobzFwk6Z15KUa2i/sxMZ1mz4hDUawkfF+bn7ZMa8aiyhGl3DCf1sG42ugeQ/VZCg\ntWsXVHPrIm0nUTyxy03KgV+KduoEt/x882dv/whR4Xs/eXQXx1Xzegy4kYr4hm+0\n1ScMh+KMy3oRhAoFPkngsn4fT9vWxkZAOuH6HUUNDGQU3CKzUVSp0Yg3BR2tIkOo\n3DQmBBQfEyfU/4bN47yDTcNO03Pur2jV3hzoFKr4GDf4O64nBAQa/ZtaKca/ze/4\n+ijQAuWLBywPUFKIy1X7v54saX7L9J1cMI3EbWoBrLkPg86FxyiodpRyAhCNMNFc\nIM9dyBTrAgMBAAECggEAK2q9qcJlm1oHkbPINp1BXLaAymlMkVa/bPmgoCUCeqzC\ncYsbZOGw/9d5G8KcH5bZY5FABmyM2/uCJ2oXobq27RdqUxfVh6EHI4P/pyPjXbNg\nP/TRgnYKIpZVPxnjURFLP75INfZNtoKHZdPYE53rNlutdZTUUUb5Pe/1SLon3pOa\nn52/WqpG0ZU9gFSV7LqisBwL/b1O7pxqQxbaXXoan6m2Q+Slfo9ASLbfoabX2aHy\nBwHNt5mKDP1Ct8rliibbtZaN1I8Y2aSJOrLBrysP0yGc6sSKsbxfdZOXJ4o0pp7h\nlpgo1PNscYJqt0YFAVEeRadyhB1GSjG8kmYHaWq4IQKBgQDV9tQARFM5YPybofzz\nF4rnM+6nTt8c9rM9B7m+H/EnnZDwKZ4EeUkyWdwi+I+lUIOmnoQaKhLIHCA5JDjy\nHG2L4nX/06a1WQbqGm+PhAD+whRCk5S7PKfjih/IStkJl34LEjAcpN21cLILZR9F\nx01oHeZPKZkdURoxJRemOsQnmwKBgQC+4o007XG4P/ZceKQLbrI2IVifDf6/9rvV\nu6//UCWrIK/+JSU7YG9SmUz9jq7OtiULxAkd3XebT/USSokYflTCidNrFGoog8uW\n2olx7O53cJSitrEMdqdIrM3qP03EcL3SGWdEMb5zMzjib0JEg/8XJ8U4J8lqjRYk\nI4jiLfck8QKBgQCuD93J1cuqmycFaVOBYz6h9hKJU1WYl22bEaTecdfadYmKhyDA\n3PZaGtZGMNWTFBv1uXEYY+/hkVf5hGk7NTs9GF8MXt95GlLQse7d3vJ8TeDzhGHz\nrFWceyp9474K2o40bXhsZQbkayi8smgteQSU1fYdomaofekD3/wMRDR9TwKBgB4J\ntqDSkKem4RTSIzZtV/vuqWx2HlWE9TAAFgT+Tr8sKKW8oKxEU6BS4WuxLU74GMJO\nOgUKGc1F+tHDCungz2PBnpRmJQJ1Iu99Ok3ENA51l943E4tbjJ2CG/E1vv4fo69R\nMg/oKAkm0VfInEv93YF+T6mzkR2EUCylrt+wzghxAoGACz41ctlbg9ID0DHloPit\nwidKBC9AVoMFK4UFAqtdW+OJ7FKNjTFLimBJyYGE8TGZbYZoBu75TzoY7+flhy4L\npRmUmEoSDumGS/SDDS4IKXKf/XY++sGxBsBlQ7PHV44E7uzIYeCm3DXun5EpZ056\nccHuZ0/OzkvXgoKqPtFkrJU=\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-01-07 09:13:40.467332" } STDOUT: Request "20250107141338" removed. 
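The teardown that follows is two steps: certmonger tracking is stopped with getcert (the command and its output are shown above), and the generated certificate and key files are removed by the next task. A rough standalone equivalent of that cleanup, using the paths from the log, might look like the tasks below; this is only a sketch of what the role performs through its own certificate handling, not the role's actual task file:

- name: Stop certmonger tracking for the demo certificate
  ansible.builtin.command:
    cmd: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt
  changed_when: false  # treated as query-style cleanup in this sketch

- name: Remove the generated certificate and key
  ansible.builtin.file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key
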
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Tuesday 07 January 2025 09:13:40 -0500 (0:00:00.505) 0:00:13.508 ******* changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Tuesday 07 January 2025 09:13:41 -0500 (0:00:01.106) 0:00:14.615 ******* included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 07 January 2025 09:13:41 -0500 (0:00:00.069) 0:00:14.684 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 07 January 2025 09:13:41 -0500 (0:00:00.027) 0:00:14.712 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 07 January 2025 09:13:41 -0500 (0:00:00.038) 0:00:14.750 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 07 January 2025 09:13:42 -0500 (0:00:00.360) 0:00:15.111 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 07 January 2025 09:13:42 -0500 (0:00:00.029) 0:00:15.141 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 07 January 2025 09:13:42 -0500 (0:00:00.358) 0:00:15.499 ******* ok: [managed-node2] => { "ansible_facts": { 
"__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 07 January 2025 09:13:42 -0500 (0:00:00.026) 0:00:15.525 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 07 January 2025 09:13:42 -0500 (0:00:00.049) 0:00:15.574 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.967) 0:00:16.542 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.049) 0:00:16.592 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.051) 0:00:16.643 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.048) 0:00:16.692 ******* skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.049) 0:00:16.741 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 07 January 2025 09:13:43 -0500 (0:00:00.049) 0:00:16.790 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.030552", "end": "2025-01-07 09:13:44.168265", "rc": 0, "start": "2025-01-07 09:13:44.137713" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.392) 0:00:17.183 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.034) 0:00:17.217 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.032) 0:00:17.249 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.099) 0:00:17.349 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.128) 0:00:17.477 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.046) 0:00:17.524 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.047) 0:00:17.571 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:13:44 -0500 (0:00:00.063) 0:00:17.635 ******* ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.520) 0:00:18.155 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.071) 0:00:18.227 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.067) 0:00:18.294 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.400) 0:00:18.695 ******* skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.033) 0:00:18.729 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.034) 0:00:18.763 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.033) 0:00:18.797 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.038) 0:00:18.835 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.049) 0:00:18.884 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:13:45 -0500 (0:00:00.051) 0:00:18.936 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.037) 0:00:18.973 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.039) 0:00:19.013 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.067) 0:00:19.080 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.058) 0:00:19.139 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.071) 0:00:19.210 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.055) 0:00:19.266 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.081) 0:00:19.347 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.048) 0:00:19.395 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.035) 0:00:19.431 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.074) 0:00:19.506 ******* skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.036) 0:00:19.543 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.031) 0:00:19.575 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.063) 0:00:19.638 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.032) 0:00:19.671 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.032) 0:00:19.703 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.037) 0:00:19.741 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Tuesday 07 January 2025 09:13:46 -0500 (0:00:00.047) 0:00:19.789 ******* included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Tuesday 07 January 2025 09:13:47 -0500 (0:00:00.175) 0:00:19.964 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Tuesday 07 January 2025 09:13:47 -0500 (0:00:00.093) 0:00:20.057 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Tuesday 07 January 2025 09:13:47 -0500 (0:00:00.126) 0:00:20.184 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Tuesday 07 January 2025 09:13:47 -0500 (0:00:00.436) 0:00:20.620 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Tuesday 07 January 2025 09:13:47 -0500 (0:00:00.058) 0:00:20.678 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Tuesday 07 January 2025 09:13:48 -0500 (0:00:00.382) 0:00:21.061 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Tuesday 07 January 2025 09:13:48 -0500 (0:00:00.045) 0:00:21.106 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Tuesday 07 January 2025 09:13:48 -0500 (0:00:00.776) 0:00:21.882 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Tuesday 07 January 2025 09:13:48 -0500 (0:00:00.043) 0:00:21.926 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Tuesday 07 January 2025 09:13:49 -0500 (0:00:00.036) 0:00:21.962 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Tuesday 07 January 2025 09:13:49 -0500 (0:00:00.041) 0:00:22.004 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Tuesday 07 January 2025 09:13:49 -0500 (0:00:00.035) 0:00:22.039 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Tuesday 07 January 2025 09:13:49 -0500 (0:00:00.040) 0:00:22.079 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:09:59 EST", "ActiveEnterTimestampMonotonic": "368272829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus-broker.service sysinit.target dbus.socket system.slice basic.target polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:09:58 EST", "AssertTimestampMonotonic": "367306301", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "548452000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:09:58 EST", "ConditionTimestampMonotonic": "367306297", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"iptables.service shutdown.target ipset.service ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5002", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainHandoffTimestampMonotonic": "367337087", "ExecMainPID": "10983", "ExecMainStartTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainStartTimestampMonotonic": "367309888", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:09:58 EST", "InactiveExitTimestampMonotonic": "367310320", "InvocationID": "25e094e5f1f7407f91f3aca634286234", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10983", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3127996416", "MemoryCurrent": "33341440", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35258368", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", 
"StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:13:37 EST", "StateChangeTimestampMonotonic": "586808132", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Tuesday 07 January 2025 09:13:49 -0500 (0:00:00.544) 0:00:22.624 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:09:59 EST", "ActiveEnterTimestampMonotonic": "368272829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus-broker.service sysinit.target dbus.socket system.slice basic.target polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:09:58 EST", "AssertTimestampMonotonic": "367306301", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "548452000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:09:58 EST", "ConditionTimestampMonotonic": "367306297", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service shutdown.target ipset.service 
ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5002", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainHandoffTimestampMonotonic": "367337087", "ExecMainPID": "10983", "ExecMainStartTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainStartTimestampMonotonic": "367309888", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:09:58 EST", "InactiveExitTimestampMonotonic": "367310320", "InvocationID": "25e094e5f1f7407f91f3aca634286234", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", 
"LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10983", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3123372032", "MemoryCurrent": "33341440", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35258368", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", 
"StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:13:37 EST", "StateChangeTimestampMonotonic": "586808132", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Tuesday 07 January 2025 09:13:50 -0500 (0:00:00.557) 0:00:23.182 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Tuesday 07 January 2025 09:13:50 -0500 (0:00:00.042) 0:00:23.224 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Tuesday 07 January 2025 09:13:50 -0500 (0:00:00.032) 0:00:23.257 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Tuesday 07 January 2025 09:13:50 -0500 (0:00:00.031) 0:00:23.288 ******* changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Tuesday 07 January 2025 09:13:51 -0500 (0:00:01.176) 0:00:24.465 ******* skipping: [managed-node2] => 
(item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.064) 0:00:24.529 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.043) 0:00:24.573 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.050) 0:00:24.623 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.058) 0:00:24.681 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.052) 0:00:24.734 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.103) 0:00:24.837 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.057) 0:00:24.894 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to 
cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Tuesday 07 January 2025 09:13:51 -0500 (0:00:00.040) 0:00:24.934 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.037) 0:00:24.972 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.033) 0:00:25.006 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.033) 0:00:25.039 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.136) 0:00:25.176 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.036) 0:00:25.213 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.058) 0:00:25.271 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.044) 0:00:25.316 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not 
ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.060) 0:00:25.376 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.070) 0:00:25.447 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.053) 0:00:25.501 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.066) 0:00:25.567 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.071) 0:00:25.638 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.052) 0:00:25.691 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.044) 0:00:25.736 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.035) 0:00:25.771 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.091) 0:00:25.863 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.037) 0:00:25.901 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:13:52 -0500 (0:00:00.038) 0:00:25.939 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.039) 0:00:25.979 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.059) 0:00:26.039 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.032) 0:00:26.071 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.031) 0:00:26.103 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.032) 0:00:26.136 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.034) 0:00:26.170 ******* [WARNING]: Using a variable for 
a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.625) 0:00:26.795 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.039) 0:00:26.835 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.068) 0:00:26.903 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:13:53 -0500 (0:00:00.044) 0:00:26.947 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.039) 0:00:26.987 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.047) 0:00:27.034 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.032) 0:00:27.067 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.032) 0:00:27.100 ******* skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.031) 0:00:27.131 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.033) 0:00:27.165 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.033) 0:00:27.198 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.095) 0:00:27.293 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.053) 0:00:27.346 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.038) 0:00:27.385 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.040) 0:00:27.425 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.051) 0:00:27.477 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger 
if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.068) 0:00:27.546 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.032) 0:00:27.578 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.032) 0:00:27.611 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.038) 0:00:27.649 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:13:54 -0500 (0:00:00.044) 0:00:27.694 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.509) 0:00:28.203 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.037) 0:00:28.240 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.058) 0:00:28.299 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 
January 2025 09:13:55 -0500 (0:00:00.036) 0:00:28.336 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.035) 0:00:28.371 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.059) 0:00:28.431 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.051) 0:00:28.482 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.039) 0:00:28.521 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.039) 0:00:28.561 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.041) 0:00:28.603 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.093) 0:00:28.697 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.048) 0:00:28.745 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.035) 0:00:28.781 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.033) 0:00:28.814 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.033) 0:00:28.848 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:13:55 -0500 (0:00:00.040) 0:00:28.888 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.064) 0:00:28.953 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.050) 0:00:29.004 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.049) 0:00:29.054 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.035) 0:00:29.090 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.039) 0:00:29.129 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.495) 0:00:29.625 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.029) 0:00:29.654 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.166) 0:00:29.821 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.045) 0:00:29.867 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.041) 0:00:29.908 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" 
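NOTE: The __podman_quadlet_str fact logged just above carries the source of the quadlet-demo.network quadlet with its newlines escaped. Unescaped, that file reads as follows (a reconstruction from the logged fact, not additional playbook output):

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress

As the subsequent records show, the role copies this file to /etc/containers/systemd/quadlet-demo.network, reloads systemd, and starts the generated quadlet-demo-network.service, a oneshot unit whose ExecStart (per the status dump below) is /usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo.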
} TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:13:56 -0500 (0:00:00.032) 0:00:29.941 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.048) 0:00:29.990 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.103) 0:00:30.094 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.036) 0:00:30.130 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.037) 0:00:30.168 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.044) 0:00:30.213 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:13:57 
-0500 (0:00:00.377) 0:00:30.590 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.033) 0:00:30.624 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.036) 0:00:30.661 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.034) 0:00:30.695 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.036) 0:00:30.731 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.034) 0:00:30.766 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.057) 0:00:30.824 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.059) 0:00:30.883 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:13:57 -0500 (0:00:00.054) 0:00:30.938 ******* ok: [managed-node2] => { 
"ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.089) 0:00:31.027 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.057) 0:00:31.085 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.055) 0:00:31.140 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.129) 0:00:31.270 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.063) 0:00:31.334 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.053) 0:00:31.387 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.177) 0:00:31.565 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.092) 
0:00:31.658 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.039) 0:00:31.697 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.039) 0:00:31.737 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.039) 0:00:31.777 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.034) 0:00:31.812 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:13:58 -0500 (0:00:00.032) 0:00:31.844 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:13:59 -0500 (0:00:00.408) 0:00:32.253 ******* changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": "/root/.ansible/tmp/ansible-tmp-1736259239.349858-19473-42692965240365/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:14:00 -0500 (0:00:00.840) 0:00:33.093 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:14:00 -0500 (0:00:00.066) 0:00:33.159 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:14:00 -0500 (0:00:00.059) 0:00:33.219 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.799) 0:00:34.018 ******* changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target -.mount systemd-journald.socket basic.target sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 
--label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3119398912", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", 
"WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.614) 0:00:34.633 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.039) 0:00:34.673 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.063) 0:00:34.736 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.064) 0:00:34.801 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:01 -0500 (0:00:00.067) 0:00:34.869 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.085) 0:00:34.954 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.131) 0:00:35.086 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.058) 
0:00:35.145 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.219) 0:00:35.365 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.096) 0:00:35.461 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:14:02 -0500 (0:00:00.457) 0:00:35.918 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.065) 0:00:35.984 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.058) 0:00:36.042 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.077) 0:00:36.120 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.091) 0:00:36.211 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.064) 0:00:36.276 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.065) 0:00:36.341 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.050) 0:00:36.391 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.050) 0:00:36.442 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.070) 0:00:36.513 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.036) 0:00:36.549 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.035) 0:00:36.584 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.091) 0:00:36.676 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.074) 0:00:36.750 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.056) 0:00:36.807 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 09:14:03 -0500 (0:00:00.107) 0:00:36.914 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.074) 0:00:36.989 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.094) 0:00:37.084 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.044) 0:00:37.128 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.047) 0:00:37.175 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.046) 0:00:37.222 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.059) 0:00:37.281 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:04 -0500 (0:00:00.454) 0:00:37.736 ******* changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1736259244.858735-19722-68882637510749/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:14:05 -0500 (0:00:00.810) 0:00:38.546 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:14:05 -0500 (0:00:00.034) 0:00:38.581 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:14:05 -0500 (0:00:00.037) 0:00:38.619 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:14:06 -0500 (0:00:00.842) 0:00:39.461 ******* changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target network-online.target basic.target system.slice systemd-journald.socket -.mount", "AllowIsolate": "no", "AssertResult": 
"no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": 
"private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3114532864", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.614) 0:00:40.076 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.034) 0:00:40.110 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.120) 0:00:40.230 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.070) 0:00:40.301 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.095) 0:00:40.396 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.077) 0:00:40.474 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.074) 0:00:40.549 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.038) 0:00:40.587 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.036) 0:00:40.623 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:14:07 -0500 (0:00:00.047) 0:00:40.671 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, 
"device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.445) 0:00:41.117 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.130) 0:00:41.247 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.058) 0:00:41.306 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.085) 0:00:41.391 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.118) 0:00:41.510 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.072) 0:00:41.582 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.094) 0:00:41.676 ******* skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.065) 0:00:41.742 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.053) 0:00:41.796 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.071) 0:00:41.868 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.043) 0:00:41.911 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:14:08 -0500 (0:00:00.035) 0:00:41.947 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.084) 0:00:42.032 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.064) 0:00:42.097 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.062) 0:00:42.160 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.096) 0:00:42.256 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.080) 0:00:42.337 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.038) 0:00:42.375 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.033) 0:00:42.409 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.031) 0:00:42.441 ******* changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:14:09 -0500 (0:00:00.460) 0:00:42.902 ******* changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:14:18 -0500 (0:00:08.279) 0:00:51.181 ******* ok: [managed-node2] => { 
"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:18 -0500 (0:00:00.445) 0:00:51.627 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:14:18 -0500 (0:00:00.034) 0:00:51.661 ******* changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1736259258.759416-20427-10094735080069/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:14:19 -0500 (0:00:00.762) 0:00:52.424 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:14:19 -0500 (0:00:00.044) 0:00:52.469 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:14:20 -0500 (0:00:00.836) 0:00:53.305 ******* changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target basic.target quadlet-demo-network.service system.slice -.mount tmp.mount quadlet-demo-mysql-volume.service systemd-journald.socket network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", 
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2978422784", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", 
"PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql-volume.service sysinit.target -.mount quadlet-demo-network.service system.slice", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:14:21 -0500 (0:00:01.001) 0:00:54.307 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.061) 0:00:54.368 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.075) 0:00:54.444 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.095) 0:00:54.539 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.086) 0:00:54.625 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.097) 0:00:54.723 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.143) 0:00:54.866 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:14:21 -0500 (0:00:00.060) 0:00:54.926 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.064) 0:00:54.991 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.075) 0:00:55.066 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.487) 0:00:55.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.058) 0:00:55.613 ******* skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.070) 0:00:55.683 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.058) 0:00:55.742 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:14:22 -0500 (0:00:00.184) 0:00:55.926 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.064) 0:00:55.991 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.063) 0:00:56.054 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.074) 0:00:56.128 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.061) 0:00:56.190 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:14:23 -0500 
(0:00:00.092) 0:00:56.283 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.062) 0:00:56.345 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.071) 0:00:56.417 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.185) 0:00:56.603 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.088) 0:00:56.691 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.067) 0:00:56.758 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 09:14:23 -0500 (0:00:00.115) 0:00:56.874 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.097) 0:00:56.972 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.070) 0:00:57.042 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.059) 0:00:57.102 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.052) 0:00:57.154 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.041) 0:00:57.195 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.040) 0:00:57.236 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:24 -0500 (0:00:00.443) 0:00:57.679 ******* changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1736259264.7863326-20711-60529197268232/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:14:25 -0500 (0:00:00.739) 0:00:58.418 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:14:25 -0500 (0:00:00.119) 0:00:58.538 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:14:25 -0500 (0:00:00.055) 0:00:58.593 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.975) 0:00:59.568 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.037) 0:00:59.606 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.039) 0:00:59.645 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.086) 0:00:59.732 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", 
"__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.051) 0:00:59.783 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.055) 0:00:59.838 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:14:26 -0500 (0:00:00.059) 0:00:59.897 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.105) 0:01:00.003 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.060) 0:01:00.063 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.062) 0:01:00.126 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.078) 0:01:00.205 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": 
"root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.625) 0:01:00.830 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.036) 0:01:00.867 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:14:27 -0500 (0:00:00.046) 0:01:00.914 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.058) 0:01:00.972 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.057) 0:01:01.029 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.089) 0:01:01.118 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.076) 0:01:01.194 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.065) 0:01:01.260 ******* 
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.136) 0:01:01.397 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.096) 0:01:01.494 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.057) 0:01:01.552 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.055) 0:01:01.607 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.138) 0:01:01.745 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.051) 0:01:01.797 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:14:28 -0500 (0:00:00.038) 0:01:01.836 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 
09:14:28 -0500 (0:00:00.089) 0:01:01.926 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.087) 0:01:02.013 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.055) 0:01:02.069 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.065) 0:01:02.134 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.065) 0:01:02.200 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.056) 0:01:02.256 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.061) 0:01:02.317 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.475) 0:01:02.793 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:14:29 -0500 (0:00:00.042) 0:01:02.835 ******* changed: [managed-node2] => { "changed": 
true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1736259269.9375236-20890-15337147536376/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:14:30 -0500 (0:00:00.789) 0:01:03.624 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:14:30 -0500 (0:00:00.066) 0:01:03.691 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.855) 0:01:04.546 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.111) 0:01:04.658 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.044) 0:01:04.703 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.051) 0:01:04.755 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.042) 0:01:04.797 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.036) 0:01:04.833 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:14:31 -0500 (0:00:00.050) 0:01:04.884 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.069) 0:01:04.954 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.038) 0:01:04.992 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.037) 0:01:05.030 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.046) 0:01:05.077 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": 
true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.378) 0:01:05.456 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.036) 0:01:05.493 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.034) 0:01:05.527 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.036) 0:01:05.564 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.039) 0:01:05.603 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.049) 0:01:05.653 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.054) 0:01:05.707 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.040) 0:01:05.748 ******* skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.040) 0:01:05.789 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.068) 0:01:05.857 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:14:32 -0500 (0:00:00.035) 0:01:05.893 ******* ok: [managed-node2] => { "changed": false, "content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3R
QYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.451) 0:01:06.345 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.093) 0:01:06.438 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.042) 0:01:06.481 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.032) 0:01:06.513 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.085) 0:01:06.599 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.067) 0:01:06.666 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.033) 0:01:06.700 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.032) 0:01:06.733 ******* skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 07 January 2025 09:14:33 -0500 (0:00:00.033) 0:01:06.766 ******* changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 07 January 2025 09:14:34 -0500 (0:00:00.767) 0:01:07.534 ******* changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 07 January 2025 09:15:02 -0500 (0:00:28.079) 0:01:35.613 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 07 January 2025 09:15:03 -0500 (0:00:00.425) 0:01:36.039 ******* changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1736259303.1670082-21735-227828488195663/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 07 January 2025 09:15:03 -0500 (0:00:00.887) 0:01:36.927 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 07 January 2025 09:15:04 -0500 (0:00:00.057) 0:01:36.984 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 07 January 2025 09:15:04 -0500 (0:00:00.054) 0:01:37.039 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Tuesday 07 January 2025 09:15:04 -0500 (0:00:00.894) 0:01:37.934 ******* changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target quadlet-demo-mysql.service -.mount systemd-journald.socket quadlet-demo-network.service network-online.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", 
"ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": 
"auto", "MemoryAccounting": "yes", "MemoryAvailable": "2489139200", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service -.mount quadlet-demo-network.service sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", 
"SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Tuesday 07 January 2025 09:15:07 -0500 (0:00:02.473) 0:01:40.408 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Tuesday 07 January 2025 09:15:07 -0500 (0:00:00.065) 0:01:40.473 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Tuesday 07 January 2025 09:15:07 -0500 (0:00:00.055) 0:01:40.528 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Tuesday 07 January 2025 09:15:07 -0500 (0:00:00.144) 0:01:40.673 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Tuesday 07 January 2025 09:15:07 -0500 (0:00:00.083) 0:01:40.759 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.005110", "end": "2025-01-07 09:15:08.176947", "rc": 0, "start": "2025-01-07 09:15:08.171837" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Jan 7 09:10 ../ -rw-r--r--. 1 root root 74 Jan 7 09:13 quadlet-demo.network -rw-r--r--. 1 root root 9 Jan 7 09:14 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Jan 7 09:14 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Jan 7 09:14 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Jan 7 09:14 quadlet-demo.yml -rw-r--r--. 1 root root 456 Jan 7 09:15 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Jan 7 09:15 ./ TASK [Check containers] ******************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100 Tuesday 07 January 2025 09:15:08 -0500 (0:00:00.464) 0:01:41.224 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.042843", "end": "2025-01-07 09:15:08.641905", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:08.599062" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp httpd3-httpd3 c22d23c0f7b4 quay.io/linux-system-roles/mysql:5.6 mysqld 47 seconds ago Up 47 seconds (healthy) 3306/tcp quadlet-demo-mysql f433509c99b1 localhost/podman-pause:5.3.1-1733097600 3 seconds ago Up 3 seconds a96f3a51b8d1-service 7cdb1162de3d localhost/podman-pause:5.3.1-1733097600 2 seconds ago Up 2 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 7cd86f9cd249-infra b6284e900e38 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 2 seconds ago Up 2 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress 8975483c7a2e quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 
2 seconds ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check volumes] *********************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105 Tuesday 07 January 2025 09:15:08 -0500 (0:00:00.473) 0:01:41.698 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.030452", "end": "2025-01-07 09:15:09.112333", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:09.081881" } STDOUT: DRIVER VOLUME NAME local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [Check pods] ************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110 Tuesday 07 January 2025 09:15:09 -0500 (0:00:00.451) 0:01:42.149 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.039173", "end": "2025-01-07 09:15:09.562384", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:09.523211" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS 7cd86f9cd249 quadlet-demo Running 3 seconds ago 7cdb1162de3d 7cdb1162de3d,b6284e900e38,8975483c7a2e 7cd86f9cd249-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running 3457d4d54eec httpd3 Running About a minute ago 0479d7891e30 0479d7891e30,4bd23d3a55ea 3457d4d54eec-infra,httpd3-httpd3 running,running 5c56b3244e88 httpd2 Running 2 minutes ago b8b3f35398f1 b8b3f35398f1,153c7d559088 5c56b3244e88-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115 Tuesday 07 January 2025 09:15:09 -0500 (0:00:00.431) 0:01:42.580 ******* ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.014526", "end": "2025-01-07 09:15:09.949761", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:09.935235" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [Check web] *************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 Tuesday 07 January 2025 09:15:10 -0500 (0:00:00.387) 0:01:42.967 ******* FAILED - RETRYING: [managed-node2]: Check web (6 retries left). FAILED - RETRYING: [managed-node2]: Check web (5 retries left). FAILED - RETRYING: [managed-node2]: Check web (4 retries left). FAILED - RETRYING: [managed-node2]: Check web (3 retries left). FAILED - RETRYING: [managed-node2]: Check web (2 retries left). FAILED - RETRYING: [managed-node2]: Check web (1 retries left). fatal: [managed-node2]: FAILED! 
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Tuesday 07 January 2025 09:15:43 -0500 (0:00:33.156) 0:02:16.123 ******* fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.031813", "end": "2025-01-07 09:15:43.526513", "failed_when_result": true, "rc": 0, "start": "2025-01-07 09:15:43.494700" } STDOUT: Jan 07 09:09:26 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:09:26 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:09:27 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1419. Jan 07 09:09:47 managed-node2 systemd[1]: Started run-r4c5d8ba7d3c64765b8320d78c79f9dfc.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r4c5d8ba7d3c64765b8320d78c79f9dfc.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r4c5d8ba7d3c64765b8320d78c79f9dfc.service has finished successfully. ░░ ░░ The job identifier is 1497. Jan 07 09:09:47 managed-node2 systemd[1]: Reload requested from client PID 9590 ('systemctl') (unit session-6.scope)... Jan 07 09:09:47 managed-node2 systemd[1]: Reloading... Jan 07 09:09:47 managed-node2 systemd[1]: Reloading finished in 187 ms. 
Jan 07 09:09:47 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1575. Jan 07 09:09:48 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 07 09:09:49 managed-node2 sudo[8767]: pam_unix(sudo:session): session closed for user root Jan 07 09:09:50 managed-node2 python3.12[9993]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:09:51 managed-node2 python3.12[10130]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 07 09:09:52 managed-node2 python3.12[10262]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:09:54 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 07 09:09:54 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1575. Jan 07 09:09:54 managed-node2 systemd[1]: run-r4c5d8ba7d3c64765b8320d78c79f9dfc.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r4c5d8ba7d3c64765b8320d78c79f9dfc.service has successfully entered the 'dead' state. 
Jan 07 09:09:54 managed-node2 python3.12[10395]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:09:55 managed-node2 python3.12[10530]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:09:55 managed-node2 python3.12[10661]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:09:57 managed-node2 python3.12[10793]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:09:57 managed-node2 python3.12[10926]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:09:57 managed-node2 systemd[1]: Reload requested from client PID 10929 ('systemctl') (unit session-6.scope)... Jan 07 09:09:57 managed-node2 systemd[1]: Reloading... Jan 07 09:09:58 managed-node2 systemd[1]: Reloading finished in 187 ms. Jan 07 09:09:58 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1653. Jan 07 09:09:59 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1653. Jan 07 09:09:59 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 07 09:10:00 managed-node2 python3.12[11138]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:10:00 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1737. Jan 07 09:10:00 managed-node2 rsyslogd[656]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 07 09:10:00 managed-node2 polkitd[11158]: Started polkitd version 125 Jan 07 09:10:00 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1737. Jan 07 09:10:01 managed-node2 python3.12[11299]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:02 managed-node2 python3.12[11430]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:03 managed-node2 python3.12[11561]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:03 managed-node2 python3.12[11693]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:04 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:10:04 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:10:04 managed-node2 systemd[1]: Started run-r785432b6fcc84070a0d9701d8e8d1eb2.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r785432b6fcc84070a0d9701d8e8d1eb2.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r785432b6fcc84070a0d9701d8e8d1eb2.service has finished successfully. ░░ ░░ The job identifier is 1818. Jan 07 09:10:04 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1896. Jan 07 09:10:05 managed-node2 python3.12[11831]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 07 09:10:05 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 07 09:10:05 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1896. Jan 07 09:10:05 managed-node2 systemd[1]: run-r785432b6fcc84070a0d9701d8e8d1eb2.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r785432b6fcc84070a0d9701d8e8d1eb2.service has successfully entered the 'dead' state. Jan 07 09:10:07 managed-node2 python3.12[11995]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 07 09:10:09 managed-node2 kernel: SELinux: Converting 472 SID table entries... 
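The firewalld installation, service start, firewall_lib port opening and local_seport labeling recorded above are the podman role delegating to the firewall and selinux system roles for TCP ports 15001-15003. A minimal sketch of equivalent standalone role calls, assuming the roles' documented "firewall" and "selinux_ports" inputs (variable names are not visible in this log and are taken from the role documentation), would be:

    - name: Open the test ports with the firewall role (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 15001-15003/tcp
            state: enabled
            permanent: true
            runtime: true

    - name: Label the same ports http_port_t with the selinux role (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.selinux
      vars:
        selinux_ports:
          - ports: 15001-15003
            proto: tcp
            setype: http_port_t
            state: present

The SELinux policy reload messages that follow are the expected side effect of adding the port mapping.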
Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 07 09:10:09 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 07 09:10:09 managed-node2 python3.12[12130]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 07 09:10:14 managed-node2 python3.12[12261]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:16 managed-node2 python3.12[12394]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:17 managed-node2 python3.12[12525]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:10:17 managed-node2 python3.12[12656]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:10:18 managed-node2 python3.12[12761]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259017.5317495-9505-2206886160846/.source.yml _original_basename=.ejhcxkic follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:10:18 managed-node2 python3.12[12892]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:10:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat389402142-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat389402142-merged.mount has successfully entered the 'dead' state. 
Jan 07 09:10:18 managed-node2 kernel: evm: overlay not supported Jan 07 09:10:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2867156120-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2867156120-merged.mount has successfully entered the 'dead' state. Jan 07 09:10:18 managed-node2 podman[12899]: 2025-01-07 09:10:18.930800361 -0500 EST m=+0.075737307 system refresh Jan 07 09:10:19 managed-node2 podman[12899]: 2025-01-07 09:10:19.324004643 -0500 EST m=+0.468941435 image build df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927 Jan 07 09:10:19 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1976. Jan 07 09:10:19 managed-node2 systemd[1]: Created slice machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice - cgroup machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice. ░░ Subject: A start job for unit machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice has finished successfully. ░░ ░░ The job identifier is 1975. Jan 07 09:10:19 managed-node2 podman[12899]: 2025-01-07 09:10:19.386258688 -0500 EST m=+0.531195392 container create 53850f64337353052fb00ced894962509bb152080d70aedec878c74e242d2e61 (image=localhost/podman-pause:5.3.1-1733097600, name=91785d133eb8-infra, pod_id=91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258, io.buildah.version=1.38.0) Jan 07 09:10:19 managed-node2 podman[12899]: 2025-01-07 09:10:19.391118237 -0500 EST m=+0.536054940 pod create 91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258 (image=, name=nopull) Jan 07 09:10:22 managed-node2 podman[12899]: 2025-01-07 09:10:22.289971828 -0500 EST m=+3.434908532 container create c5e812091842fdd1fa646b501e8c221e3bf0b37fb574017d5d0fd9c402fcfe07 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 07 09:10:22 managed-node2 podman[12899]: 2025-01-07 09:10:22.268616674 -0500 EST m=+3.413553577 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:10:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
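The pod and container creation events above come from deploying nopull.yml through containers.podman.podman_play with state=created, which builds the pod, infra container and test container without starting them. A minimal sketch of that call on its own, using only the parameters visible in this log:

    - name: Deploy a kube YAML without starting it (sketch)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: created
      become: true

Using state: started instead would also start the pod; that is what the rootless deployment later in this log does.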
Jan 07 09:10:25 managed-node2 python3.12[13229]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:10:26 managed-node2 python3.12[13366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:29 managed-node2 python3.12[13499]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:30 managed-node2 python3.12[13631]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:10:30 managed-node2 python3.12[13764]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:10:31 managed-node2 python3.12[13897]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:10:34 managed-node2 python3.12[14028]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:34 managed-node2 python3.12[14160]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:35 managed-node2 python3.12[14292]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 07 09:10:37 managed-node2 python3.12[14452]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 07 09:10:38 managed-node2 python3.12[14583]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 07 09:10:43 managed-node2 python3.12[14714]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:10:46 managed-node2 podman[14856]: 2025-01-07 09:10:46.863689978 -0500 EST m=+0.679104722 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 07 09:10:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:10:47 managed-node2 python3.12[14994]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:47 managed-node2 python3.12[15125]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:10:48 managed-node2 python3.12[15256]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:10:48 managed-node2 python3.12[15361]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259047.9635375-10894-147878216883290/.source.yml _original_basename=.xwaxfal5 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:10:48 managed-node2 python3.12[15492]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:10:49 managed-node2 systemd[1]: Created slice machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice - cgroup machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice. ░░ Subject: A start job for unit machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice has finished successfully. ░░ ░░ The job identifier is 1981. Jan 07 09:10:49 managed-node2 podman[15499]: 2025-01-07 09:10:49.073964524 -0500 EST m=+0.056881726 container create 546ba5ccfdbfa6bd7e02424d8544a031d64add65b6d94a20934330e1c6444191 (image=localhost/podman-pause:5.3.1-1733097600, name=810a994e879b-infra, pod_id=810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922, io.buildah.version=1.38.0) Jan 07 09:10:49 managed-node2 podman[15499]: 2025-01-07 09:10:49.081819064 -0500 EST m=+0.064736177 pod create 810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922 (image=, name=bogus) Jan 07 09:10:49 managed-node2 podman[15499]: 2025-01-07 09:10:49.33198025 -0500 EST m=+0.314897357 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 07 09:10:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
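The image pull-error entries above are expected in this test: bogus.yml references quay.io/linux-system-roles/this_is_a_bogus_image, which cannot be pulled, so podman records the failure while still creating the pod skeleton. A playbook that wanted to fail early on an unreachable image could pre-pull it; a hedged sketch (not something this test does), shown here with the reachable test image from earlier in the log:

    - name: Pre-pull the test image and fail early if it is unreachable (sketch)
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610
        state: present
      become: true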
Jan 07 09:10:51 managed-node2 python3.12[15768]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:10:53 managed-node2 python3.12[15906]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:10:56 managed-node2 python3.12[16039]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:10:58 managed-node2 python3.12[16171]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:10:58 managed-node2 python3.12[16304]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:10:59 managed-node2 python3.12[16437]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:11:01 managed-node2 python3.12[16568]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:02 managed-node2 python3.12[16700]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:03 managed-node2 python3.12[16832]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 07 09:11:05 managed-node2 python3.12[16992]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 07 09:11:06 managed-node2 python3.12[17123]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 07 09:11:11 managed-node2 python3.12[17254]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:12 managed-node2 python3.12[17387]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:13 managed-node2 python3.12[17519]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 07 09:11:14 managed-node2 python3.12[17652]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:15 managed-node2 python3.12[17785]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:11:15 managed-node2 python3.12[17785]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 07 09:11:15 managed-node2 podman[17792]: 2025-01-07 09:11:15.456602831 -0500 EST m=+0.025755349 pod stop 91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258 (image=, name=nopull) Jan 07 09:11:15 managed-node2 systemd[1]: Removed slice machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice - cgroup machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice. ░░ Subject: A stop job for unit machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258.slice has finished. ░░ ░░ The job identifier is 1987 and the job result is done. 
Jan 07 09:11:15 managed-node2 podman[17792]: 2025-01-07 09:11:15.495686494 -0500 EST m=+0.064838961 container remove c5e812091842fdd1fa646b501e8c221e3bf0b37fb574017d5d0fd9c402fcfe07 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 07 09:11:15 managed-node2 podman[17792]: 2025-01-07 09:11:15.642167684 -0500 EST m=+0.211320178 container remove 53850f64337353052fb00ced894962509bb152080d70aedec878c74e242d2e61 (image=localhost/podman-pause:5.3.1-1733097600, name=91785d133eb8-infra, pod_id=91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258, io.buildah.version=1.38.0) Jan 07 09:11:15 managed-node2 podman[17792]: 2025-01-07 09:11:15.651587874 -0500 EST m=+0.220740333 pod remove 91785d133eb8b7e7387a18d990d58d828dd86152a46ba5e748f7873a554f8258 (image=, name=nopull) Jan 07 09:11:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:11:16 managed-node2 python3.12[17932]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:11:16 managed-node2 python3.12[18063]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
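The teardown sequence above derives the per-file podman-kube@ unit name with systemd-escape, stops the unit, removes the play with state=absent, deletes the kube file and prunes images. A sketch of the same steps as plain tasks, mirroring the commands recorded in this log:

    - name: Compute the podman-kube@ unit name for the kube file
      ansible.builtin.command:
        cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml
      register: kube_unit
      changed_when: false

    - name: Stop and disable the per-file podman-kube unit
      ansible.builtin.systemd:
        name: "{{ kube_unit.stdout }}"
        state: stopped
        enabled: false

    - name: Remove the pod and containers created from the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Remove the kube file itself
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Prune now-unused images
      ansible.builtin.command:
        cmd: podman image prune -f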
Jan 07 09:11:19 managed-node2 python3.12[18333]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:20 managed-node2 python3.12[18470]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:24 managed-node2 python3.12[18603]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:26 managed-node2 python3.12[18735]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:11:26 managed-node2 python3.12[18868]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:11:27 managed-node2 python3.12[19001]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:11:30 managed-node2 python3.12[19132]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:31 managed-node2 python3.12[19264]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:32 managed-node2 python3.12[19396]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 07 09:11:34 managed-node2 python3.12[19556]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 07 09:11:35 managed-node2 python3.12[19687]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 07 09:11:39 managed-node2 python3.12[19818]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:40 managed-node2 python3.12[19951]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:41 managed-node2 python3.12[20083]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 07 09:11:42 managed-node2 python3.12[20216]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:43 managed-node2 python3.12[20349]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:11:43 managed-node2 python3.12[20349]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 07 09:11:43 managed-node2 podman[20357]: 2025-01-07 09:11:43.10249661 -0500 EST m=+0.025715037 pod stop 810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922 (image=, name=bogus) Jan 07 09:11:43 managed-node2 systemd[1]: Removed slice machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice - cgroup machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice. ░░ Subject: A stop job for unit machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922.slice has finished. ░░ ░░ The job identifier is 1989 and the job result is done. 
Jan 07 09:11:43 managed-node2 podman[20357]: 2025-01-07 09:11:43.136735836 -0500 EST m=+0.059954162 container remove 546ba5ccfdbfa6bd7e02424d8544a031d64add65b6d94a20934330e1c6444191 (image=localhost/podman-pause:5.3.1-1733097600, name=810a994e879b-infra, pod_id=810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922, io.buildah.version=1.38.0) Jan 07 09:11:43 managed-node2 podman[20357]: 2025-01-07 09:11:43.147017338 -0500 EST m=+0.070235639 pod remove 810a994e879b7a421373a2b9449288a45860023e760e45288ba78e0b6b1d2922 (image=, name=bogus) Jan 07 09:11:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:11:43 managed-node2 python3.12[20497]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:11:44 managed-node2 python3.12[20628]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 07 09:11:47 managed-node2 python3.12[20897]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:11:49 managed-node2 python3.12[21034]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:11:52 managed-node2 python3.12[21167]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:54 managed-node2 python3.12[21299]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:11:55 managed-node2 python3.12[21432]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:11:56 managed-node2 python3.12[21565]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:11:58 managed-node2 python3.12[21696]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:11:59 managed-node2 python3.12[21828]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:12:00 managed-node2 python3.12[21960]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 07 09:12:02 managed-node2 python3.12[22120]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 07 09:12:02 managed-node2 python3.12[22251]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 07 09:12:07 managed-node2 python3.12[22382]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 07 09:12:08 managed-node2 python3.12[22514]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:08 managed-node2 python3.12[22647]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:09 managed-node2 python3.12[22779]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:10 managed-node2 python3.12[22911]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:11 managed-node2 python3.12[23043]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 07 09:12:11 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 2069. Jan 07 09:12:11 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1991. Jan 07 09:12:11 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1991. Jan 07 09:12:11 managed-node2 systemd[4541]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. 
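Before switching to the rootless podman_basic_user, the run checks subordinate ID support with getsubids and enables lingering so the user's systemd instance and any user units outlive login sessions; the loginctl call is guarded by the linger file so it only runs once. A sketch of those checks, assuming the same user name as in this log:

    - name: Verify subordinate UID ranges exist for the rootless user
      ansible.builtin.command:
        cmd: getsubids podman_basic_user
      changed_when: false

    - name: Verify subordinate GID ranges exist for the rootless user
      ansible.builtin.command:
        cmd: getsubids -g podman_basic_user
      changed_when: false

    - name: Enable lingering so user services keep running without a login session
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user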
Jan 07 09:12:11 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 2071. Jan 07 09:12:11 managed-node2 systemd[4541]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 07 09:12:11 managed-node2 systemd[4541]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 07 09:12:11 managed-node2 systemd-logind[657]: New session 7 of user podman_basic_user. ░░ Subject: A new session 7 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 23047. Jan 07 09:12:11 managed-node2 (systemd)[23047]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 07 09:12:11 managed-node2 systemd[23047]: Queued start job for default target default.target. Jan 07 09:12:11 managed-node2 systemd[23047]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 07 09:12:11 managed-node2 systemd[23047]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 07 09:12:11 managed-node2 systemd[23047]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 07 09:12:11 managed-node2 systemd[23047]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 07 09:12:11 managed-node2 systemd[23047]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 07 09:12:11 managed-node2 systemd[23047]: Starting dbus.socket - D-Bus User Message Bus Socket... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 07 09:12:11 managed-node2 systemd[23047]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 5. Jan 07 09:12:11 managed-node2 systemd[23047]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 07 09:12:11 managed-node2 systemd[23047]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 07 09:12:11 managed-node2 systemd[23047]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 07 09:12:11 managed-node2 systemd[23047]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 07 09:12:11 managed-node2 systemd[23047]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 07 09:12:11 managed-node2 systemd[23047]: Startup finished in 69ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 69440 microseconds. Jan 07 09:12:11 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 2071. 
Jan 07 09:12:12 managed-node2 python3.12[23196]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:13 managed-node2 python3.12[23327]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:13 managed-node2 sudo[23500]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ijejrcautuquyjfzlowgygnnvlvpkaqf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259133.298375-15090-71630362262234/AnsiballZ_podman_image.py' Jan 07 09:12:13 managed-node2 sudo[23500]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23500) opened. Jan 07 09:12:13 managed-node2 sudo[23500]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:13 managed-node2 systemd[23047]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 07 09:12:13 managed-node2 systemd[23047]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:12:13 managed-node2 systemd[23047]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Ready Jan 07 09:12:13 managed-node2 systemd[23047]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 07 09:12:13 managed-node2 systemd[23047]: Started podman-23511.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 07 09:12:13 managed-node2 systemd[23047]: Started podman-pause-36ce1bf5.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 07 09:12:14 managed-node2 systemd[23047]: Started podman-23527.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 07 09:12:15 managed-node2 systemd[23047]: Started podman-23552.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 07 09:12:15 managed-node2 sudo[23500]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:16 managed-node2 python3.12[23690]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:16 managed-node2 python3.12[23821]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:17 managed-node2 python3.12[23952]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:12:17 managed-node2 python3.12[24057]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259136.8938768-15263-66515330324667/.source.yml _original_basename=.7ihyk1_p follow=False checksum=906ffc495c17b7b3c2713751534afab1dd238226 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:17 managed-node2 sudo[24230]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wteuofgsqkravhujeqqglijpjhgodjqi ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259137.6223898-15310-111121950013799/AnsiballZ_podman_play.py' Jan 07 09:12:17 managed-node2 sudo[24230]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24230) opened. 
Jan 07 09:12:17 managed-node2 sudo[24230]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:17 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:12:18 managed-node2 systemd[23047]: Started podman-24240.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 07 09:12:18 managed-node2 systemd[23047]: Created slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice - cgroup user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 07 09:12:18 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 07 09:12:18 managed-node2 systemd[23047]: Started rootless-netns-177d061c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 07 09:12:18 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:18 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:18 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:18 managed-node2 systemd[23047]: Started run-r1a32890a0caa4de8a1ca84865a041cdc.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
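The rootless deployment above runs podman_play as podman_basic_user over sudo with XDG_RUNTIME_DIR pointing at the user's runtime directory, which is what lets podman reach the user's systemd instance and rootless network stack (the rootless-netns, veth and aardvark-dns activity recorded around this point). A sketch of issuing the same call directly from a playbook, assuming the user, UID and paths shown in this log:

    - name: Start the httpd1 kube play as the rootless user (sketch)
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001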
Jan 07 09:12:18 managed-node2 aardvark-dns[24322]: starting aardvark on a child with pid 24323 Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Successfully parsed config Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Listen v6 ip {} Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 07 09:12:18 managed-node2 conmon[24338]: conmon bce005bb46a5e13ccfec : failed to write to /proc/self/oom_score_adj: Permission denied Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-conmon-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : terminal_ctrl_fd: 14 Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : winsz read side: 17, winsz write side: 18 Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : container PID: 24341 Jan 07 09:12:18 managed-node2 conmon[24343]: conmon 601607edc0bad5ac48b6 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : terminal_ctrl_fd: 13 Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : winsz read side: 16, winsz write side: 17 Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. 
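The debug output that follows (pod "httpd1", container "httpd1-httpd1", image quay.io/libpod/testimage:20210610, command "/bin/busybox-extras httpd -f -p 80", working directory /var/www backed by a mount, host port 15001 published to container port 80) gives enough detail to sketch what httpd1.yml plausibly contains. This is a hedged reconstruction, not the generated file: the volume name and host path below are assumptions, and labels or extra fields in the real spec are omitted.

---
# Plausible shape of httpd1.yml, inferred from the podman play kube debug log.
apiVersion: v1
kind: Pod
metadata:
  name: httpd1
spec:
  containers:
    - name: httpd1
      image: quay.io/libpod/testimage:20210610
      command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
      workingDir: /var/www
      ports:
        - containerPort: 80
          hostPort: 15001
      volumeMounts:
        - name: www               # assumed volume name
          mountPath: /var/www
  volumes:
    - name: www
      hostPath:
        path: /tmp/lsr/httpd1     # assumed host path
        type: Directory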
Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : container PID: 24347 Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Container: 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-07T09:12:18-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-07T09:12:18-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-07T09:12:18-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-07T09:12:18-05:00" level=info msg="Using sqlite as database backend" time="2025-01-07T09:12:18-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-07T09:12:18-05:00" level=debug msg="Using graph driver overlay" time="2025-01-07T09:12:18-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-07T09:12:18-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-07T09:12:18-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-07T09:12:18-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-07T09:12:18-05:00" level=debug msg="Using transient store: false" time="2025-01-07T09:12:18-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-07T09:12:18-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-07T09:12:18-05:00" level=debug msg="Initializing event backend file" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime 
crun-wasm: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-07T09:12:18-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-07T09:12:18-05:00" level=debug msg="found free device name podman1" time="2025-01-07T09:12:18-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c4,c889\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container ID: dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9" time="2025-01-07T09:12:18-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-07T09:12:18-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-07T09:12:18-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\"" time="2025-01-07T09:12:18-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-07T09:12:18-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-07T09:12:18-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\"" time="2025-01-07T09:12:18-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-07T09:12:18-05:00" level=debug msg="layer list: [\"95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c\"]" time="2025-01-07T09:12:18-05:00" level=debug msg="using \"/var/tmp/buildah3006773201\" to hold temporary data" time="2025-01-07T09:12:18-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/diff" time="2025-01-07T09:12:18-05:00" level=debug msg="layer \"95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-07T09:12:18-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-07T14:12:18.225948604Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-07T14:12:18.195886547Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-07T14:12:18.228836521Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-07T09:12:18-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-07T14:12:18.225948604Z\",\"container\":\"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-07T14:12:18.195886547Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-07T14:12:18.228836521Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:cb1e3c19c4cf1aab48629f1b13656c0f68172e1cb398ae5aad1fcfaf7f709006\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-07T09:12:18-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-07T09:12:18-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-07T09:12:18-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-07T09:12:18-05:00" level=debug msg="Overall: allowed" time="2025-01-07T09:12:18-05:00" level=debug msg="start reading config" time="2025-01-07T09:12:18-05:00" level=debug msg="finished reading config" time="2025-01-07T09:12:18-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-07T09:12:18-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-07T09:12:18-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-07T09:12:18-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-07T09:12:18-05:00" level=debug msg="No compression detected" time="2025-01-07T09:12:18-05:00" level=debug msg="Using original blob without modification" time="2025-01-07T09:12:18-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-07T09:12:18-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-07T09:12:18-05:00" level=debug msg="No compression detected" time="2025-01-07T09:12:18-05:00" level=debug msg="Compression change for blob sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Using original blob without modification" time="2025-01-07T09:12:18-05:00" level=debug msg="setting image creation date to 2025-01-07 14:12:18.225948604 +0000 UTC" time="2025-01-07T09:12:18-05:00" level=debug msg="created new image ID \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\" with metadata \"{}\"" time="2025-01-07T09:12:18-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="printing final image id \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:18-05:00" level=debug msg="setting container name 7ee39644f44c-infra" time="2025-01-07T09:12:18-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network cafde5e4b8d539fd25d14051143b0ea2f2ee26225bafd0e8d7a632b06c900ecb bridge podman1 2025-01-07 09:12:18.053358397 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-07T09:12:18-05:00" level=debug msg="Allocated lock 1 for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" has run directory \"/run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:18-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-07T09:12:18-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-07T09:12:18-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:18-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /proc" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /sys" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-07T09:12:18-05:00" level=debug msg="Allocated lock 2 for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" has run directory \"/run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Strongconnecting node bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="Pushed bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 onto stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Finishing node bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0. Popped bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 off stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Strongconnecting node 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="Pushed 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a onto stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Finishing node 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a. 
Popped 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a off stack" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/PCNV6S4VBI3WTFCEGML2VVFLBY,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c515,c561\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-c7bfc11a-f7f2-ab65-e293-fb6f673f11a0 for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="Mounted container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created root filesystem for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 at /home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged" time="2025-01-07T09:12:18-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-07T09:12:18-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-07T09:12:18-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_cafde5e4_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "de:0a:de:23:41:22", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Starting parent driver\"\ntime=\"2025-01-07T09:12:18-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport3024916983/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport3024916983/.bp.sock]\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=Ready\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport is ready" time="2025-01-07T09:12:18-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:18-05:00" level=debug msg="Setting Cgroups for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 to user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice:libpod:bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:18-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created OCI spec for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/config.json" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:18-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 -u bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata -p /run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/pidfile -n 7ee39644f44c-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0]" 
time="2025-01-07T09:12:18-05:00" level=info msg="Running conmon under slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice and unitName libpod-conmon-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-07T09:12:18-05:00" level=debug msg="Received: 24341" time="2025-01-07T09:12:18-05:00" level=info msg="Got Conmon PID as 24339" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 in OCI runtime" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-07T09:12:18-05:00" level=debug msg="Starting container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 with command [/catatonit -P]" time="2025-01-07T09:12:18-05:00" level=debug msg="Started container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/TK3CDRREGBBMRE5LHFO2QTPNJ2,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c515,c561\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Mounted container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created root filesystem for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a at /home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/merged" time="2025-01-07T09:12:18-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:18-05:00" level=debug msg="Setting Cgroups for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a to user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice:libpod:601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:18-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-07T09:12:18-05:00" level=debug msg="Created OCI spec for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/config.json" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created 
cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:18-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a -u 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata -p /run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a]" time="2025-01-07T09:12:18-05:00" level=info msg="Running conmon under slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice and unitName libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-07T09:12:18-05:00" level=debug msg="Received: 24347" time="2025-01-07T09:12:18-05:00" level=info msg="Got Conmon PID as 24344" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a in OCI runtime" time="2025-01-07T09:12:18-05:00" level=debug msg="Starting container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-07T09:12:18-05:00" level=debug msg="Started container 
601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-07T09:12:18-05:00" level=debug msg="Shutting down engines" time="2025-01-07T09:12:18-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24240 Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 07 09:12:18 managed-node2 sudo[24230]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:19 managed-node2 sudo[24521]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bqytykouajhgmdahwlaftzhdelfjixfe ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259138.979414-15371-200801400085149/AnsiballZ_systemd.py' Jan 07 09:12:19 managed-node2 sudo[24521]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24521) opened. Jan 07 09:12:19 managed-node2 sudo[24521]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:19 managed-node2 python3.12[24524]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:12:19 managed-node2 systemd[23047]: Reload requested from client PID 24525 ('systemctl')... Jan 07 09:12:19 managed-node2 systemd[23047]: Reloading... Jan 07 09:12:19 managed-node2 systemd[23047]: Reloading finished in 44 ms. Jan 07 09:12:19 managed-node2 sudo[24521]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:19 managed-node2 sudo[24707]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efqzexmaanhcvrywxycthvkoeunbtoqc ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259139.7113974-15414-201876225858933/AnsiballZ_systemd.py' Jan 07 09:12:19 managed-node2 sudo[24707]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24707) opened. Jan 07 09:12:19 managed-node2 sudo[24707]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:20 managed-node2 python3.12[24710]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:12:20 managed-node2 systemd[23047]: Reload requested from client PID 24713 ('systemctl')... Jan 07 09:12:20 managed-node2 systemd[23047]: Reloading... Jan 07 09:12:20 managed-node2 systemd[23047]: Reloading finished in 44 ms. Jan 07 09:12:20 managed-node2 sudo[24707]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:20 managed-node2 sudo[24895]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jjdbmfgtkwigowclxuedzcwfckspdogg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259140.495615-15451-280969617872416/AnsiballZ_systemd.py' Jan 07 09:12:20 managed-node2 sudo[24895]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24895) opened. 
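The three ansible-systemd invocations logged above (a user-scope daemon-reload, then enabling, then starting the podman-kube@ instance) can be sketched as the tasks below. The escaped unit name is copied verbatim from the journal; the role issues enable and start as separate calls, which are combined here for brevity, and become/environment again mirror the sudo entries rather than the role's own plumbing.

---
# Sketch of the logged user-scope systemd operations.
- name: Reload the user systemd instance
  ansible.builtin.systemd:
    scope: user
    daemon_reload: true
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001

- name: Enable and start the podman-kube@ instance for httpd1.yml
  ansible.builtin.systemd:
    name: 'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'
    scope: user
    enabled: true
    state: started
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001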
Jan 07 09:12:20 managed-node2 sudo[24895]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:20 managed-node2 python3.12[24898]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:12:20 managed-node2 systemd[23047]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Jan 07 09:12:20 managed-node2 systemd[23047]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Received SIGHUP Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Successfully parsed config Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Listen v4 ip {} Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Listen v6 ip {} Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: No configuration found stopping the sever Jan 07 09:12:21 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:21 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 07 09:12:21 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 07 09:12:21 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0)" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
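The template instance name that systemd starts above is simply the systemd-escaped path of the kube file ("/" becomes "-", a literal "-" becomes "\x2d"). As an illustrative aside that is not part of the role, the tasks below show one way to reproduce that name on the target; the registered variable name is made up for the example.

---
# Sketch: derive the podman-kube@ instance name from the kube file path.
- name: Compute the escaped instance name for a kube file
  ansible.builtin.command:
    argv:
      - systemd-escape
      - /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
  register: __kube_unit_escape   # illustrative variable name
  changed_when: false

- name: Show the resulting unit name
  ansible.builtin.debug:
    msg: "podman-kube@{{ __kube_unit_escape.stdout }}.service"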
Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Initializing event backend file" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 
07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0)" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24911 Jan 07 09:12:31 managed-node2 podman[24901]: time="2025-01-07T09:12:31-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jan 07 09:12:31 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : container 24347 exited with status 137 Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a)" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
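The SIGTERM-then-SIGKILL sequence recorded above is podman's normal stop-timeout handling: podman sends the container's StopSignal (SIGTERM here), waits the stop timeout (10 seconds, per the warning), then escalates to SIGKILL, which is why conmon reports exit status 137 (128 + 9). A minimal sketch of the same behaviour for a single container, assuming the container name from this log:

    podman stop --time 10 httpd1-httpd1    # escalates to SIGKILL after 10 seconds; a killed container exits 137

Workloads that need longer to shut down cleanly can be given a larger timeout at creation time with --stop-timeout.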
Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:31 managed-node2 systemd[23047]: Stopping libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Jan 07 09:12:31 managed-node2 systemd[23047]: Stopped libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jan 07 09:12:31 managed-node2 systemd[23047]: Removed slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice - cgroup user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Jan 07 09:12:31 managed-node2 systemd[23047]: user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice: No such file or directory Jan 07 09:12:31 managed-node2 podman[24901]: Pods stopped: Jan 07 09:12:31 managed-node2 podman[24901]: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Jan 07 09:12:31 managed-node2 podman[24901]: Pods removed: Jan 07 09:12:31 managed-node2 podman[24901]: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Jan 07 09:12:31 managed-node2 podman[24901]: Secrets removed: Jan 07 09:12:31 managed-node2 podman[24901]: Volumes removed: Jan 07 09:12:31 managed-node2 systemd[23047]: Created slice user-libpod_pod_6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185.slice - cgroup user-libpod_pod_6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. 
Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-c63a6bf4648719515a4699103a0db4aec20870161a7b837e1f996172cb7c0a19.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jan 07 09:12:31 managed-node2 systemd[23047]: Started rootless-netns-6598f252.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:31 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:31 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:31 managed-node2 systemd[23047]: Started run-r263f95b073e04414b82b4f3cc94fc83d.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-845db18e0a114d44bb99b856f4c82915865862ec0a3de9e09c1e3addba107323.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-23f6f23a68db521dffb7a208e5841f2c5ef904642c86114498c6544bb891d417.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 07 09:12:31 managed-node2 podman[24901]: Pod: Jan 07 09:12:31 managed-node2 podman[24901]: 6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185 Jan 07 09:12:31 managed-node2 podman[24901]: Container: Jan 07 09:12:31 managed-node2 podman[24901]: 23f6f23a68db521dffb7a208e5841f2c5ef904642c86114498c6544bb891d417 Jan 07 09:12:31 managed-node2 systemd[23047]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
Jan 07 09:12:31 managed-node2 sudo[24895]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:32 managed-node2 python3.12[25117]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 07 09:12:33 managed-node2 python3.12[25249]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:34 managed-node2 python3.12[25382]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:36 managed-node2 python3.12[25514]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:36 managed-node2 python3.12[25645]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:38 managed-node2 podman[25806]: 2025-01-07 09:12:38.971262362 -0500 EST m=+1.698124942 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
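The ansible.legacy.command call above uses systemd-escape to turn the kube YAML path into a template instance name; reproducing it by hand on the managed node (same path as in the log) yields the unit name that the later systemd tasks operate on:

    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    # prints: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service

Slashes become '-' and literal dashes become '\x2d', which is why the unit names throughout this log look the way they do.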
Jan 07 09:12:39 managed-node2 python3.12[25951]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:39 managed-node2 python3.12[26082]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:40 managed-node2 python3.12[26213]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:12:40 managed-node2 python3.12[26318]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259160.0679595-16278-120616227644317/.source.yml _original_basename=.tnnbkt8f follow=False checksum=a3cc229a74f6c618c5c624f29b34a65abdf49afb backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:12:41 managed-node2 systemd[1]: Created slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice - cgroup machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice. ░░ Subject: A start job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished successfully. ░░ ░░ The job identifier is 2155. 
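The containers.podman.podman_play task logged above drives podman's kube play support; the PODMAN-PLAY-KUBE lines further down record the exact CLI it resolves to, so an equivalent manual run (same file, rootful storage) would be roughly:

    podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
    # creates the httpd2 pod, its pause/infra container, and httpd2-httpd2 from quay.io/libpod/testimage:20210610

The module itself mostly handles argument mapping and captures that command's stdout and stderr, which is what gets echoed back into the journal entries below.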
Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.252793882 -0500 EST m=+0.070623561 container create ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.257483181 -0500 EST m=+0.075312775 pod create 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.283955008 -0500 EST m=+0.101784609 container create c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.260210262 -0500 EST m=+0.078039902 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:41 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:41 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3096] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3132] device (podman1): carrier: link connected Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3156] device (veth0): carrier: link connected Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3159] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 07 09:12:41 managed-node2 (udev-worker)[26472]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:41 managed-node2 (udev-worker)[26471]: Network interface NamePolicy= disabled on kernel command line. 
Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3747] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3752] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3757] device (podman1): Activation: starting connection 'podman1' (34db12e8-8c9b-44ab-93ea-b15259092d20) Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3772] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3774] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3776] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3778] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 systemd[1]: Started run-rf869b629f5524c4e8aba6919e24741dc.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rf869b629f5524c4e8aba6919e24741dc.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rf869b629f5524c4e8aba6919e24741dc.scope has finished successfully. ░░ ░░ The job identifier is 2241. Jan 07 09:12:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2162. Jan 07 09:12:41 managed-node2 aardvark-dns[26496]: starting aardvark on a child with pid 26498 Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Successfully parsed config Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Listen v6 ip {} Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jan 07 09:12:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2162. Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4427] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4444] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4449] device (podman1): Activation: successful, device activated. 
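Taken together, the kernel, NetworkManager and aardvark-dns messages above show the rootful kube network coming up: a podman1 bridge with a veth0 port for the pod, and an aardvark-dns instance serving podman-default-kube-network on 10.89.0.1 with the host's resolvers as upstreams. If this state ever needs to be checked by hand, a couple of read-only commands (names taken from this log) are enough:

    podman network inspect podman-default-kube-network
    ip addr show podman1
    # aardvark-dns reads its config from /run/containers/networks/aardvark-dns (see the systemd-run scope above)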
Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope. ░░ Subject: A start job for unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully. ░░ ░░ The job identifier is 2247. Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : terminal_ctrl_fd: 13 Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : winsz read side: 17, winsz write side: 18 Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope - libcrun container. ░░ Subject: A start job for unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully. ░░ ░░ The job identifier is 2254. Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : container PID: 26514 Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.507460159 -0500 EST m=+0.325289761 container init ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.511387783 -0500 EST m=+0.329217447 container start ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope. ░░ Subject: A start job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully. ░░ ░░ The job identifier is 2261. Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : terminal_ctrl_fd: 12 Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : winsz read side: 16, winsz write side: 17 Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope - libcrun container. ░░ Subject: A start job for unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully. 
░░ ░░ The job identifier is 2268. Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : container PID: 26519 Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.574978664 -0500 EST m=+0.392808281 container init c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.577467831 -0500 EST m=+0.395297502 container start c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.582818566 -0500 EST m=+0.400648090 pod start 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Container: c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-07T09:12:41-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-01-07T09:12:41-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-07T09:12:41-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-07T09:12:41-05:00" level=info msg="Using sqlite as database backend" time="2025-01-07T09:12:41-05:00" level=debug msg="Using graph driver overlay" time="2025-01-07T09:12:41-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Using run root /run/containers/storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-01-07T09:12:41-05:00" level=debug msg="Using tmp dir /run/libpod" time="2025-01-07T09:12:41-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-01-07T09:12:41-05:00" level=debug msg="Using transient store: false" time="2025-01-07T09:12:41-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-01-07T09:12:41-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-01-07T09:12:41-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-01-07T09:12:41-05:00" level=debug msg="Initializing event backend journald" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-07T09:12:41-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-07T09:12:41-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fe17f9641389980d9286eb85f735b0683746d93051a2476303460ddb3b1d2db9 bridge podman1 2025-01-07 09:10:18.933530103 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-07T09:12:41-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:41-05:00" level=debug msg="setting container name 8f0fc920afcf-infra" time="2025-01-07T09:12:41-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Allocated lock 1 for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" has work directory \"/var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" has run directory \"/run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:41-05:00" level=debug msg="adding container to pod httpd2" time="2025-01-07T09:12:41-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-01-07T09:12:41-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:41-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /proc" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /sys" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-07T09:12:41-05:00" level=debug msg="Allocated lock 2 for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" has work directory \"/var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" has run directory \"/run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Strongconnecting node ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="Pushed ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea onto stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Finishing node ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea. Popped ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea off stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Strongconnecting node c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="Pushed c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 onto stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Finishing node c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3. 
Popped c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 off stack" time="2025-01-07T09:12:41-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/WKGQ2CQQTFXYEKRDTAEHJQTPHF,upperdir=/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/diff,workdir=/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c720,c783\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Mounted container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" at \"/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Made network namespace at /run/netns/netns-ca2332a8-ba7a-e980-636a-50c016800d1d for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="Created root filesystem for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea at /var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_fe17f964_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "3e:a5:aa:c9:ad:84", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-07T09:12:41-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:41-05:00" level=debug msg="Setting Cgroups for container 
ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea to machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice:libpod:ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:41-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created OCI spec for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea at /var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/config.json" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:41-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea -u ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata -p /run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/pidfile -n 8f0fc920afcf-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea]" time="2025-01-07T09:12:41-05:00" level=info msg="Running conmon under slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice and unitName libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope" time="2025-01-07T09:12:41-05:00" level=debug msg="Received: 26514" time="2025-01-07T09:12:41-05:00" level=info msg="Got Conmon PID as 26512" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea in OCI runtime" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-07T09:12:41-05:00" level=debug msg="Starting container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea with command [/catatonit -P]" time="2025-01-07T09:12:41-05:00" level=debug msg="Started container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/CXSX6DXW3XYBB4KCHC4NKHQO5P,upperdir=/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/diff,workdir=/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c720,c783\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Mounted container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" at \"/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created root filesystem for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 at /var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/merged" time="2025-01-07T09:12:41-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:41-05:00" level=debug msg="Setting Cgroups for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 to machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice:libpod:c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:41-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-07T09:12:41-05:00" level=debug msg="Created OCI spec for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 at /var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/config.json" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:41-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 -u c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata -p /run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3]" time="2025-01-07T09:12:41-05:00" level=info msg="Running conmon under slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice and unitName libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope" time="2025-01-07T09:12:41-05:00" level=debug msg="Received: 26519" time="2025-01-07T09:12:41-05:00" level=info msg="Got Conmon PID as 26517" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 in OCI runtime" time="2025-01-07T09:12:41-05:00" level=debug msg="Starting container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-07T09:12:41-05:00" level=debug msg="Started container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-07T09:12:41-05:00" level=debug msg="Shutting down engines" time="2025-01-07T09:12:41-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26456 Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 07 09:12:42 managed-node2 python3.12[26651]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:12:42 managed-node2 systemd[1]: Reload requested from client PID 26652 ('systemctl') (unit session-6.scope)... Jan 07 09:12:42 managed-node2 systemd[1]: Reloading... Jan 07 09:12:42 managed-node2 systemd[1]: Reloading finished in 198 ms. Jan 07 09:12:43 managed-node2 python3.12[26838]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:12:43 managed-node2 systemd[1]: Reload requested from client PID 26841 ('systemctl') (unit session-6.scope)... Jan 07 09:12:43 managed-node2 systemd[1]: Reloading... Jan 07 09:12:43 managed-node2 systemd[1]: Reloading finished in 199 ms. Jan 07 09:12:43 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2275. Jan 07 09:12:43 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 07 09:12:43 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2275. Jan 07 09:12:44 managed-node2 python3.12[27031]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:12:44 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2356. Jan 07 09:12:44 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2353. 
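Annotation: the journal above records the two-step handoff for httpd2 — the containers.podman.podman_play module first runs podman play kube directly to create the pod, then the role reloads systemd and enables/starts the podman-kube@ template unit whose instance name is the systemd-escaped path of the kube file. A minimal shell sketch of the equivalent manual flow, assuming root and the same file path (standard podman/systemctl usage, not commands copied from the role itself):

    # deploy the kube YAML once, as the module does
    podman kube play --start=true /etc/containers/ansible-kubernetes.d/httpd2.yml
    # derive the template instance name from the file path
    UNIT=$(systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml)
    # hand ownership to systemd so the pod is managed as a service
    systemctl daemon-reload
    systemctl enable --now "$UNIT"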
Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.111035063 -0500 EST m=+0.026022472 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:44 managed-node2 systemd[1]: libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.130046795 -0500 EST m=+0.045034296 container died ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, io.buildah.version=1.38.0) Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Received SIGHUP Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Successfully parsed config Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Listen v4 ip {} Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Listen v6 ip {} Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: No configuration found stopping the sever Jan 07 09:12:44 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:44 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 07 09:12:44 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 07 09:12:44 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:44 managed-node2 systemd[1]: run-rf869b629f5524c4e8aba6919e24741dc.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rf869b629f5524c4e8aba6919e24741dc.scope has successfully entered the 'dead' state. 
Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea)" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using run root /run/containers/storage" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Initializing event backend journald" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured 
OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:44 managed-node2 NetworkManager[780]: [1736259164.1738] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 07 09:12:44 managed-node2 systemd[1]: run-netns-netns\x2dca2332a8\x2dba7a\x2de980\x2d636a\x2d50c016800d1d.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dca2332a8\x2dba7a\x2de980\x2d636a\x2d50c016800d1d.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay-281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2-merged.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.25742555 -0500 EST m=+0.172412876 container cleanup ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea)" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:44 managed-node2 systemd[1]: libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 podman[27035]: time="2025-01-07T09:12:54-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jan 07 09:12:54 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : container 26519 exited with status 137 Jan 07 09:12:54 managed-node2 systemd[1]: libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has successfully entered the 'dead' state. 
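Annotation: the warning just above explains why stopping httpd2 took ten seconds — the test image's httpd does not exit on SIGTERM, so podman escalates to SIGKILL after the default 10-second stop timeout, and conmon reports exit status 137 (128 + 9, i.e. killed by signal 9). A hedged sketch for inspecting or widening that timeout on a host like this one (container name taken from the log; the inspect field layout may vary between podman versions):

    # show the stop timeout recorded for the container (default is 10 seconds)
    podman container inspect --format '{{.Config.StopTimeout}}' httpd2-httpd2
    # stop it with a longer grace period instead of the default
    podman stop --time 30 httpd2-httpd2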
Jan 07 09:12:54 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice/libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope/container/memory.events Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.153206906 -0500 EST m=+10.068194509 container died c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3)" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using run root /run/containers/storage" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that 
native-diff is not being used" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Initializing event backend journald" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay-06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3-merged.mount has successfully entered the 'dead' state. 
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.197316357 -0500 EST m=+10.112303685 container cleanup c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3)" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27068 Jan 07 09:12:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: Stopped libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope. ░░ Subject: A stop job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished. ░░ ░░ The job identifier is 2439 and the job result is done. Jan 07 09:12:54 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: Removed slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice - cgroup machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice. ░░ Subject: A stop job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished. ░░ ░░ The job identifier is 2438 and the job result is done. 
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.239866271 -0500 EST m=+10.154853663 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.248256619 -0500 EST m=+10.163244142 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.277677895 -0500 EST m=+10.192665225 container remove c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.300987047 -0500 EST m=+10.215974378 container remove ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.309692431 -0500 EST m=+10.224679763 pod remove 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: Pods stopped: Jan 07 09:12:54 managed-node2 podman[27035]: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Jan 07 09:12:54 managed-node2 podman[27035]: Pods removed: Jan 07 09:12:54 managed-node2 podman[27035]: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Jan 07 09:12:54 managed-node2 podman[27035]: Secrets removed: Jan 07 09:12:54 managed-node2 podman[27035]: Volumes removed: Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.330810793 -0500 EST m=+10.245798192 container create 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 systemd[1]: Created slice machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice - cgroup machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice. 
░░ Subject: A start job for unit machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice has finished successfully. ░░ ░░ The job identifier is 2440. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.379485088 -0500 EST m=+10.294472531 container create b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.38382167 -0500 EST m=+10.298809084 pod create 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.41146549 -0500 EST m=+10.326453040 container create 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.411846857 -0500 EST m=+10.326834192 container restart 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope - libcrun container. ░░ Subject: A start job for unit libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope has finished successfully. ░░ ░░ The job identifier is 2446. 
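Annotation: the "Failed to open /run/systemd/transient/machine-libpod_pod_….slice: No such file or directory" messages earlier in this stop/replace sequence are most likely benign — podman asks systemd to tear down the pod's transient slice after systemd has already removed it. If in doubt, the transient units that do still exist can be listed (a sketch, not part of the test):

    # transient unit files systemd currently tracks for libpod pods
    ls /run/systemd/transient/ | grep -i libpod || echo "no libpod transient units"
    # slices currently loaded under machine.slice
    systemctl list-units --type=slice 'machine-libpod_pod_*'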
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.385865098 -0500 EST m=+10.300852557 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.477339275 -0500 EST m=+10.392326692 container init 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.479789417 -0500 EST m=+10.394776795 container start 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.4908] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:54 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:54 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5064] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5077] device (veth0): carrier: link connected Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5082] device (podman1): carrier: link connected Jan 07 09:12:54 managed-node2 (udev-worker)[27089]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:54 managed-node2 (udev-worker)[27088]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5468] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5477] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5502] device (podman1): Activation: starting connection 'podman1' (4124c740-ce81-487f-85a8-9f75de9cacda) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5507] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5511] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5513] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5517] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2453. Jan 07 09:12:54 managed-node2 systemd[1]: Started run-r22f39ad3274e4348b99f7c15feb5c41c.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r22f39ad3274e4348b99f7c15feb5c41c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r22f39ad3274e4348b99f7c15feb5c41c.scope has finished successfully. ░░ ░░ The job identifier is 2532. Jan 07 09:12:54 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2453. Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6074] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6077] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6084] device (podman1): Activation: successful, device activated. Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope - libcrun container. ░░ Subject: A start job for unit libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope has finished successfully. ░░ ░░ The job identifier is 2538. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.768555677 -0500 EST m=+10.683543065 container init b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.771841771 -0500 EST m=+10.686829111 container start b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope - libcrun container. ░░ Subject: A start job for unit libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope has finished successfully. 
░░ ░░ The job identifier is 2545. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.811050874 -0500 EST m=+10.726038235 container init 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.813606683 -0500 EST m=+10.728594182 container start 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.818224788 -0500 EST m=+10.733212116 pod start 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: Pod: Jan 07 09:12:54 managed-node2 podman[27035]: 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 Jan 07 09:12:54 managed-node2 podman[27035]: Container: Jan 07 09:12:54 managed-node2 podman[27035]: 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 Jan 07 09:12:54 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2353. 
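Annotation: at this point the httpd2 workload has been fully migrated under systemd — the ad-hoc pod from the podman_play run was stopped and removed, and the podman-kube@ instance recreated it (new pod ID 5c56b3244e88…) before the unit reported success. A short sketch of how the resulting state could be checked on the managed node (unit name exactly as it appears in the journal; single quotes keep the \x2d escape literal):

    # is the templated service running?
    systemctl is-active 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    # list containers together with the pod they belong to
    podman ps --pod --filter name=httpd2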
Jan 07 09:12:56 managed-node2 python3.12[27269]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:57 managed-node2 python3.12[27402]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:58 managed-node2 python3.12[27534]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:58 managed-node2 python3.12[27665]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:03 managed-node2 podman[27827]: 2025-01-07 09:13:03.415467564 -0500 EST m=+4.214936850 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:03 managed-node2 python3.12[27972]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:04 managed-node2 python3.12[28103]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:04 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
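Annotation: the same sequence now repeats for httpd3, and the journal shows the systemd-escape call the role uses to turn the kube file path into a template instance name. The escaping maps '/' to '-' and encodes literal '-' characters as \x2d, which is why the unit names in this log look the way they do (sketch of the same command; the output shown matches the unit name recorded later in the journal):

    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service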
Jan 07 09:13:04 managed-node2 python3.12[28234]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:13:05 managed-node2 python3.12[28340]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259184.4436557-17141-206954730606473/.source.yml _original_basename=.h5y_v4b2 follow=False checksum=3f9fb5fb859fc9596ef344d6e422ee2f64209bc4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:05 managed-node2 python3.12[28471]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:13:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice - cgroup machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice. ░░ Subject: A start job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished successfully. ░░ ░░ The job identifier is 2552. 
Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.662283243 -0500 EST m=+0.059054626 container create 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.667015743 -0500 EST m=+0.063787068 pod create ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.6927368 -0500 EST m=+0.089508198 container create c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:05 managed-node2 kernel: veth1: entered allmulticast mode Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.669222958 -0500 EST m=+0.065994400 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:05 managed-node2 kernel: veth1: entered promiscuous mode Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 07 09:13:05 managed-node2 NetworkManager[780]: [1736259185.7231] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jan 07 09:13:05 managed-node2 NetworkManager[780]: [1736259185.7291] device (veth1): carrier: link connected Jan 07 09:13:05 managed-node2 (udev-worker)[28494]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope. ░░ Subject: A start job for unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully. ░░ ░░ The job identifier is 2559. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope - libcrun container. ░░ Subject: A start job for unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully. ░░ ░░ The job identifier is 2566. 
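Annotation: as with httpd2, each kube-play pod here consists of an infra container built from the localhost/podman-pause image plus the application container, and each runs in its own libpod-*.scope under a machine-libpod_pod_*.slice cgroup. One way to see that structure on the host (a sketch; pod name taken from the log):

    # pods and the number of containers in each
    podman pod ps
    # containers grouped by pod, including the infra container
    podman ps --pod --filter pod=httpd3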
Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.828405419 -0500 EST m=+0.225177109 container init 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.832160032 -0500 EST m=+0.228931455 container start 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope. ░░ Subject: A start job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully. ░░ ░░ The job identifier is 2573. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope - libcrun container. ░░ Subject: A start job for unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully. ░░ ░░ The job identifier is 2580. Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.893323556 -0500 EST m=+0.290094921 container init c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.89629971 -0500 EST m=+0.293071039 container start c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.900970965 -0500 EST m=+0.297742301 pod start ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:06 managed-node2 python3.12[28658]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:13:06 managed-node2 systemd[1]: Reload requested from client PID 28659 ('systemctl') (unit session-6.scope)... Jan 07 09:13:06 managed-node2 systemd[1]: Reloading... Jan 07 09:13:06 managed-node2 systemd[1]: Reloading finished in 216 ms. Jan 07 09:13:06 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... 
░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 2587. Jan 07 09:13:07 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Jan 07 09:13:07 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 2587. Jan 07 09:13:07 managed-node2 python3.12[28846]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:13:07 managed-node2 systemd[1]: Reload requested from client PID 28849 ('systemctl') (unit session-6.scope)... Jan 07 09:13:07 managed-node2 systemd[1]: Reloading... Jan 07 09:13:07 managed-node2 systemd[1]: Reloading finished in 216 ms. Jan 07 09:13:08 managed-node2 python3.12[29034]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:13:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2665. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.647867233 -0500 EST m=+0.025545097 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:08 managed-node2 systemd[1]: libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.671925638 -0500 EST m=+0.049603547 container died 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, io.buildah.version=1.38.0) Jan 07 09:13:08 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:08 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jan 07 09:13:08 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jan 07 09:13:08 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:08 managed-node2 systemd[1]: run-netns-netns\x2d61fd3e74\x2d76fb\x2d437e\x2d898c\x2d2a6c653e1740.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d61fd3e74\x2d76fb\x2d437e\x2d898c\x2d2a6c653e1740.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2c7ef2a08c555fa94ed7dc34cb218fd5c019af0ad8db754ec30393e70bbc4a7c-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-2c7ef2a08c555fa94ed7dc34cb218fd5c019af0ad8db754ec30393e70bbc4a7c-merged.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.746972652 -0500 EST m=+0.124650421 container cleanup 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:08 managed-node2 systemd[1]: libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 podman[29038]: time="2025-01-07T09:13:18-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jan 07 09:13:18 managed-node2 systemd[1]: libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.682592217 -0500 EST m=+10.060270133 container died c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-cedec7afe0a0745406bf75e45e9c627a4195378d5bd84ff9f8497d9a1a641b40-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-cedec7afe0a0745406bf75e45e9c627a4195378d5bd84ff9f8497d9a1a641b40-merged.mount has successfully entered the 'dead' state. 
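Annotation: the httpd3 teardown mirrors the httpd2 one above, including the 10-second SIGTERM-to-SIGKILL escalation when the podman-kube@ unit replaces the ad-hoc pod. When reading a run like this after the fact, the per-unit journal is usually the quickest way to isolate one workload's events (a sketch; unit name as recorded above):

    journalctl -u 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service' --no-pager
    # or follow it live during a test run
    journalctl -f -u 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'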
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.714723753 -0500 EST m=+10.092401491 container cleanup c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:18 managed-node2 systemd[1]: Stopping libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope... ░░ Subject: A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has begun execution. ░░ ░░ The job identifier is 2751. Jan 07 09:13:18 managed-node2 systemd[1]: libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 systemd[1]: Stopped libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope. ░░ Subject: A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished. ░░ ░░ The job identifier is 2751 and the job result is done. Jan 07 09:13:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice - cgroup machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished. ░░ ░░ The job identifier is 2750 and the job result is done. 
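For reference, the teardown and re-creation of the httpd3 pod recorded in this stretch of the journal is driven by the two ansible-systemd invocations logged at 09:13:07 and 09:13:08 above. A minimal sketch of equivalent tasks, assuming a plain play context; the unit name and module parameters are the ones recorded in the log:

    - name: Enable the podman-kube@ unit for httpd3.yml
      ansible.builtin.systemd:
        # systemd-escaped path of /etc/containers/ansible-kubernetes.d/httpd3.yml
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
        scope: system
        enabled: true

    - name: Start the podman-kube@ unit for httpd3.yml
      ansible.builtin.systemd:
        # starting the unit replays the kube YAML; the old httpd3 pod above is
        # stopped and removed, then recreated
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
        scope: system
        state: started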
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.727059126 -0500 EST m=+10.104736892 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.731012349 -0500 EST m=+10.108690120 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.752360105 -0500 EST m=+10.130037878 container remove c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.777234648 -0500 EST m=+10.154912428 container remove 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.784821843 -0500 EST m=+10.162499586 pod remove ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 podman[29038]: Pods stopped: Jan 07 09:13:18 managed-node2 podman[29038]: ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef Jan 07 09:13:18 managed-node2 podman[29038]: Pods removed: Jan 07 09:13:18 managed-node2 podman[29038]: ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef Jan 07 09:13:18 managed-node2 podman[29038]: Secrets removed: Jan 07 09:13:18 managed-node2 podman[29038]: Volumes removed: Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.805183764 -0500 EST m=+10.182861530 container create c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice - cgroup machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice. 
░░ Subject: A start job for unit machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice has finished successfully. ░░ ░░ The job identifier is 2752. Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.8434908 -0500 EST m=+10.221168683 container create 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.848043542 -0500 EST m=+10.225721306 pod create 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee (image=, name=httpd3) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.875470949 -0500 EST m=+10.253148743 container create 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.875833848 -0500 EST m=+10.253511627 container restart c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.850196796 -0500 EST m=+10.227874699 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:18 managed-node2 systemd[1]: Started libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope - libcrun container. ░░ Subject: A start job for unit libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope has finished successfully. ░░ ░░ The job identifier is 2758. 
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.9282523 -0500 EST m=+10.305930108 container init c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.930560751 -0500 EST m=+10.308238599 container start c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:18 managed-node2 kernel: veth1: entered allmulticast mode Jan 07 09:13:18 managed-node2 kernel: veth1: entered promiscuous mode Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 07 09:13:18 managed-node2 NetworkManager[780]: [1736259198.9608] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 07 09:13:18 managed-node2 NetworkManager[780]: [1736259198.9622] device (veth1): carrier: link connected Jan 07 09:13:18 managed-node2 (udev-worker)[29083]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:13:19 managed-node2 systemd[1]: Started libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope - libcrun container. ░░ Subject: A start job for unit libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope has finished successfully. ░░ ░░ The job identifier is 2765. Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.047971575 -0500 EST m=+10.425649470 container init 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.050915982 -0500 EST m=+10.428594064 container start 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:19 managed-node2 systemd[1]: Started libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope - libcrun container. ░░ Subject: A start job for unit libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope has finished successfully. ░░ ░░ The job identifier is 2772. 
Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.097276903 -0500 EST m=+10.474954798 container init 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.099639667 -0500 EST m=+10.477317549 container start 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.103452832 -0500 EST m=+10.481130602 pod start 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee (image=, name=httpd3) Jan 07 09:13:19 managed-node2 podman[29038]: Pod: Jan 07 09:13:19 managed-node2 podman[29038]: 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee Jan 07 09:13:19 managed-node2 podman[29038]: Container: Jan 07 09:13:19 managed-node2 podman[29038]: 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b Jan 07 09:13:19 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2665. Jan 07 09:13:19 managed-node2 sudo[29289]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pnmkbnzuldsvzobphkxtjasvboghknnr ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259199.4684494-17727-168682880762240/AnsiballZ_command.py' Jan 07 09:13:19 managed-node2 sudo[29289]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29289) opened. Jan 07 09:13:19 managed-node2 sudo[29289]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:13:19 managed-node2 python3.12[29292]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:19 managed-node2 systemd[23047]: Started podman-29300.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
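The podman pod inspect check logged just above for httpd1 (and repeated below for httpd2 and httpd3) maps onto a command task along these lines. The command itself is the one recorded in the journal; the task name, the register variable, and the use of !unsafe to keep Jinja2 from templating the Go template braces are illustrative:

    - name: Verify that the httpd3 pod is running
      ansible.builtin.command:
        cmd: !unsafe "podman pod inspect httpd3 --format '{{.State}}'"
      register: pod_state
      changed_when: false
      # httpd1 is a rootless pod, so its check runs as podman_basic_user with
      # XDG_RUNTIME_DIR=/run/user/3001, as the sudo lines in the journal show.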
Jan 07 09:13:19 managed-node2 sudo[29289]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:13:20 managed-node2 python3.12[29439]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:20 managed-node2 python3.12[29578]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:20 managed-node2 sudo[29759]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vsldhxkafmrvalmibizzajqfspywsjxm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259200.8099418-17783-63529375764264/AnsiballZ_command.py' Jan 07 09:13:21 managed-node2 sudo[29759]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29759) opened. Jan 07 09:13:21 managed-node2 sudo[29759]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:13:21 managed-node2 python3.12[29762]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:21 managed-node2 sudo[29759]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:13:21 managed-node2 python3.12[29896]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:21 managed-node2 python3.12[30030]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:22 managed-node2 python3.12[30164]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:23 managed-node2 python3.12[30297]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget 
use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:23 managed-node2 python3.12[30428]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:24 managed-node2 python3.12[30560]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:24 managed-node2 python3.12[30691]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:27 managed-node2 python3.12[30865]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 07 09:13:29 managed-node2 python3.12[31038]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:30 managed-node2 python3.12[31169]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:32 managed-node2 python3.12[31305]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request.
Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 systemd[1]: Reload requested from client PID 31313 ('systemctl') (unit session-6.scope)... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 07 09:13:34 managed-node2 systemd[1]: Started run-r5293c8ab6057453592d98f0dcc4cfa58.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has finished successfully. ░░ ░░ The job identifier is 2783. Jan 07 09:13:34 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2861. Jan 07 09:13:34 managed-node2 systemd[1]: Reload requested from client PID 31378 ('systemctl') (unit session-6.scope)... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading finished in 363 ms. Jan 07 09:13:35 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 07 09:13:35 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 07 09:13:35 managed-node2 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2861. Jan 07 09:13:35 managed-node2 systemd[1]: run-r5293c8ab6057453592d98f0dcc4cfa58.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has successfully entered the 'dead' state. Jan 07 09:13:36 managed-node2 python3.12[31571]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:36 managed-node2 python3.12[31702]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:37 managed-node2 python3.12[31833]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:13:37 managed-node2 systemd[1]: Reload requested from client PID 31836 ('systemctl') (unit session-6.scope)... Jan 07 09:13:37 managed-node2 systemd[1]: Reloading... Jan 07 09:13:37 managed-node2 systemd[1]: Reloading finished in 214 ms. Jan 07 09:13:37 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2939. Jan 07 09:13:37 managed-node2 (rtmonger)[31893]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 07 09:13:37 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2939. 
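The certmonger setup recorded above (package install at 09:13:32, service start at 09:13:37) corresponds to tasks roughly like the following. The package list and service parameters are taken from the logged module arguments; the task names are illustrative:

    - name: Install certmonger and the packaging helper it needs
      ansible.builtin.dnf:
        name:
          - certmonger
          - python3-packaging
        state: present

    - name: Ensure certmonger is enabled and running
      ansible.builtin.systemd:
        name: certmonger
        state: started
        enabled: true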
Jan 07 09:13:38 managed-node2 python3.12[32051]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 rsyslogd[656]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[32067]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
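The certificate_request invocation above is what produces /etc/pki/tls/certs/quadlet_demo.crt and /etc/pki/tls/private/quadlet_demo.key. In role terms this is typically expressed through the certificate_requests variable; a hedged sketch of such a request (the exact variable layout used by this test is not visible in this excerpt):

    - name: Request a self-signed certificate for the quadlet demo
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo
            dns:
              - localhost
            ca: self-sign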
Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:39 managed-node2 python3.12[32198]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 07 09:13:39 managed-node2 python3.12[32329]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 07 09:13:39 managed-node2 python3.12[32460]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 07 09:13:40 managed-node2 python3.12[32591]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:40 managed-node2 certmonger[31893]: 2025-01-07 09:13:40 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:40 managed-node2 python3.12[32723]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:41 managed-node2 python3.12[32854]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:41 managed-node2 python3.12[32985]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:42 managed-node2 python3.12[33116]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:42 managed-node2 python3.12[33247]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:44 managed-node2 python3.12[33509]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:45 managed-node2 python3.12[33646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 07 09:13:45 managed-node2 python3.12[33778]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:47 managed-node2 python3.12[33911]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:48 managed-node2 python3.12[34042]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:48 managed-node2 python3.12[34173]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:49 managed-node2 python3.12[34305]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:13:50 managed-node2 python3.12[34438]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:13:50 managed-node2 python3.12[34571]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:13:51 managed-node2 python3.12[34702]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:13:57 managed-node2 python3.12[35313]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:59 managed-node2 python3.12[35446]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:59 managed-node2 python3.12[35577]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:00 managed-node2 python3.12[35682]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259239.349858-19473-42692965240365/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:00 managed-node2 python3.12[35813]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:00 managed-node2 systemd[1]: Reload requested from client PID 35814 ('systemctl') (unit session-6.scope)... Jan 07 09:14:00 managed-node2 systemd[1]: Reloading... Jan 07 09:14:00 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 07 09:14:01 managed-node2 python3.12[36001]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:01 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 3018. Jan 07 09:14:01 managed-node2 quadlet-demo-network[36005]: systemd-quadlet-demo Jan 07 09:14:01 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 3018. Jan 07 09:14:02 managed-node2 python3.12[36143]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:04 managed-node2 python3.12[36276]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:05 managed-node2 python3.12[36407]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:05 managed-node2 python3.12[36512]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259244.858735-19722-68882637510749/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:06 managed-node2 python3.12[36643]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:06 managed-node2 systemd[1]: Reload requested from client PID 36644 ('systemctl') (unit session-6.scope)... Jan 07 09:14:06 managed-node2 systemd[1]: Reloading... 
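The quadlet deployment pattern recorded above (copy a unit file into /etc/containers/systemd, reload systemd, start the generated service) looks roughly like this for quadlet-demo.network. The destination, ownership, mode, and service name come from the log; the file body is an assumption, since only its checksum is recorded:

    - name: Install the quadlet-demo.network quadlet file
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        owner: root
        group: '0'
        mode: '0644'
        # Even a bare [Network] section is enough for quadlet to generate
        # quadlet-demo-network.service, which creates the systemd-quadlet-demo
        # network printed by the unit above.
        content: |
          [Network]

    - name: Reload systemd so quadlet regenerates units
      ansible.builtin.systemd:
        daemon_reload: true

    - name: Start the generated network unit
      ansible.builtin.systemd:
        name: quadlet-demo-network.service
        state: started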
Jan 07 09:14:06 managed-node2 systemd[1]: Reloading finished in 218 ms. Jan 07 09:14:06 managed-node2 python3.12[36830]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:06 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 3102. Jan 07 09:14:07 managed-node2 podman[36834]: 2025-01-07 09:14:07.042513615 -0500 EST m=+0.026104344 volume create systemd-quadlet-demo-mysql Jan 07 09:14:07 managed-node2 quadlet-demo-mysql-volume[36834]: systemd-quadlet-demo-mysql Jan 07 09:14:07 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 3102. Jan 07 09:14:08 managed-node2 python3.12[36973]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:09 managed-node2 python3.12[37106]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:18 managed-node2 podman[37245]: 2025-01-07 09:14:18.091301948 -0500 EST m=+7.582714957 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 07 09:14:18 managed-node2 python3.12[37555]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:19 managed-node2 python3.12[37686]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:19 managed-node2 python3.12[37791]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259258.759416-20427-10094735080069/.source.container _original_basename=.fjkuewtn follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:19 managed-node2 python3.12[37922]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None 
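The quadlet-demo-mysql-volume.service unit above created the systemd-quadlet-demo-mysql volume. An ad hoc way to confirm it exists, purely illustrative and not necessarily what the test itself runs:

    - name: Confirm the quadlet-managed MySQL volume exists
      ansible.builtin.command:
        cmd: podman volume inspect systemd-quadlet-demo-mysql
      register: mysql_volume
      changed_when: false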
Jan 07 09:14:20 managed-node2 systemd[1]: Reload requested from client PID 37923 ('systemctl') (unit session-6.scope)... Jan 07 09:14:20 managed-node2 systemd[1]: Reloading... Jan 07 09:14:20 managed-node2 systemd[1]: Reloading finished in 220 ms. Jan 07 09:14:20 managed-node2 python3.12[38109]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:20 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 3186. Jan 07 09:14:20 managed-node2 podman[38113]: 2025-01-07 09:14:20.979510446 -0500 EST m=+0.045287997 container create c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:20 managed-node2 systemd[23047]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0073] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 07 09:14:21 managed-node2 kernel: veth2: entered allmulticast mode Jan 07 09:14:21 managed-node2 kernel: veth2: entered promiscuous mode Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0173] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Jan 07 09:14:21 managed-node2 systemd[23047]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0235] device (veth2): carrier: link connected Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0244] device (podman2): carrier: link connected Jan 07 09:14:21 managed-node2 (udev-worker)[38130]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:14:21 managed-node2 (udev-worker)[38129]: Network interface NamePolicy= disabled on kernel command line. 
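The quadlet-demo-mysql.container file copied at 09:14:19 is what generates the quadlet-demo-mysql.service unit started here. A hypothetical reconstruction of a minimal version of that file, wrapped in the same copy pattern as above; only the image and container name are confirmed by the journal, and every other line in the body is an assumption:

    - name: Install the quadlet-demo-mysql.container quadlet file (illustrative body)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo-mysql.container
        owner: root
        group: '0'
        mode: '0644'
        content: |
          [Container]
          Image=quay.io/linux-system-roles/mysql:5.6
          ContainerName=quadlet-demo-mysql
          # Assumed wiring: the volume and network quadlets deployed earlier,
          # plus a health check to match the healthcheck timer seen below.
          Volume=quadlet-demo-mysql.volume:/var/lib/mysql
          Network=quadlet-demo.network
          HealthCmd=/bin/true

          [Install]
          WantedBy=multi-user.target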
Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0614] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0625] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0631] device (podman2): Activation: starting connection 'podman2' (c24cf0c5-78fa-46c2-ae4b-64bb2b37c6d1) Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0633] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0635] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0636] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0638] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:20.959664842 -0500 EST m=+0.025442508 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 07 09:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3273. Jan 07 09:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3273. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1115] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1119] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1124] device (podman2): Activation: successful, device activated. Jan 07 09:14:21 managed-node2 systemd[1]: Started c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer - /usr/bin/podman healthcheck run c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd. ░░ Subject: A start job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished successfully. ░░ ░░ The job identifier is 3352. 
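The transient timer above runs /usr/bin/podman healthcheck run against the container ID on a schedule. The same check can be run once, by container name, to verify the container is healthy; exit status 0 means healthy:

    - name: Run the MySQL container health check once
      ansible.builtin.command:
        cmd: podman healthcheck run quadlet-demo-mysql
      register: mysql_health
      changed_when: false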
Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:21.192827482 -0500 EST m=+0.258605137 container init c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:21 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 3186. Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:21.223745652 -0500 EST m=+0.289523310 container start c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:21 managed-node2 quadlet-demo-mysql[38113]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd Jan 07 09:14:21 managed-node2 podman[38176]: 2025-01-07 09:14:21.717094811 -0500 EST m=+0.467717036 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:22 managed-node2 python3.12[38374]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:24 managed-node2 python3.12[38537]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:25 managed-node2 python3.12[38668]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:25 managed-node2 python3.12[38773]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259264.7863326-20711-60529197268232/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:26 managed-node2 python3.12[38904]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:26 managed-node2 systemd[1]: Reload requested from client PID 38905 ('systemctl') (unit session-6.scope)... Jan 07 09:14:26 managed-node2 systemd[1]: Reloading... Jan 07 09:14:26 managed-node2 systemd[1]: Reloading finished in 400 ms. Jan 07 09:14:26 managed-node2 systemd[1]: Starting dnf-makecache.service - dnf makecache... 
░░ Subject: A start job for unit dnf-makecache.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has begun execution. ░░ ░░ The job identifier is 3508. Jan 07 09:14:26 managed-node2 dnf[38990]: Beaker Client - RedHatEnterpriseLinux9 11 kB/s | 1.5 kB 00:00 Jan 07 09:14:26 managed-node2 dnf[38990]: Beaker harness 18 kB/s | 1.3 kB 00:00 Jan 07 09:14:26 managed-node2 dnf[38990]: Copr repo for beakerlib-libraries owned by bgon 49 kB/s | 1.8 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - BaseOS 53 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - AppStream 60 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - HighAvailability 63 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: Metadata cache created. Jan 07 09:14:27 managed-node2 systemd[1]: dnf-makecache.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dnf-makecache.service has successfully entered the 'dead' state. Jan 07 09:14:27 managed-node2 systemd[1]: Finished dnf-makecache.service - dnf makecache. ░░ Subject: A start job for unit dnf-makecache.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has finished successfully. ░░ ░░ The job identifier is 3508. Jan 07 09:14:27 managed-node2 python3.12[39137]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:29 managed-node2 python3.12[39277]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:30 managed-node2 python3.12[39408]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:30 managed-node2 python3.12[39513]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259269.9375236-20890-15337147536376/.source.yml _original_basename=.xtqxu9q2 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 07 09:14:31 managed-node2 python3.12[39667]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:31 managed-node2 systemd[1]: Reload requested from client PID 39669 ('systemctl') (unit session-6.scope)... 
Jan 07 09:14:31 managed-node2 systemd[1]: Reloading... Jan 07 09:14:31 managed-node2 systemd[1]: Reloading finished in 225 ms. Jan 07 09:14:32 managed-node2 python3.12[39855]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:33 managed-node2 python3.12[39988]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 07 09:14:34 managed-node2 python3.12[40119]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:34 managed-node2 python3.12[40250]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:52 managed-node2 podman[40580]: 2025-01-07 09:14:52.442310371 -0500 EST m=+0.617926399 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:53 managed-node2 podman[40389]: 2025-01-07 09:14:53.747429097 -0500 EST m=+18.758486426 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 07 09:15:02 managed-node2 podman[40826]: 2025-01-07 09:15:02.55194195 -0500 EST m=+8.328656821 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 07 09:15:02 managed-node2 python3.12[41090]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:03 managed-node2 python3.12[41221]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:15:03 managed-node2 python3.12[41326]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259303.1670082-21735-227828488195663/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:04 managed-node2 python3.12[41457]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None 
enabled=None force=None masked=None Jan 07 09:15:04 managed-node2 systemd[1]: Reload requested from client PID 41458 ('systemctl') (unit session-6.scope)... Jan 07 09:15:04 managed-node2 systemd[1]: Reloading... Jan 07 09:15:04 managed-node2 systemd[1]: Reloading finished in 223 ms. Jan 07 09:15:05 managed-node2 python3.12[41644]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:15:05 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3664. Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Pods stopped: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Pods removed: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Secrets removed: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Volumes removed: Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.603840603 -0500 EST m=+0.032211395 volume create wp-pv-claim Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.731730351 -0500 EST m=+0.160101142 container create f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.742380428 -0500 EST m=+0.170751219 volume create envoy-proxy-config Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.748870558 -0500 EST m=+0.177241365 volume create envoy-certificates Jan 07 09:15:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice - cgroup machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice. ░░ Subject: A start job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished successfully. ░░ ░░ The job identifier is 3751. 
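The steps logged just before this start job (copying quadlet-demo.kube into /etc/containers/systemd, then a systemd daemon-reload so the quadlet generator produces quadlet-demo.service) are the standard quadlet deployment pattern. A minimal sketch of those two steps, assuming a quadlet-demo.kube file shipped alongside the playbook rather than the temporary upload path the log shows:

- name: Install the quadlet-demo.kube unit file   # sketch; the logged src is a temp upload
  ansible.builtin.copy:
    src: quadlet-demo.kube
    dest: /etc/containers/systemd/quadlet-demo.kube
    owner: root
    group: "0"
    mode: "0644"

- name: Reload systemd so the quadlet generator (re)creates quadlet-demo.service
  ansible.builtin.systemd:
    daemon_reload: true
    scope: system

Starting the resulting quadlet-demo.service then mirrors the quadlet-demo-mysql.service start shown earlier.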
Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.798098532 -0500 EST m=+0.226469325 container create 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.804207227 -0500 EST m=+0.232578028 pod create 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.846647839 -0500 EST m=+0.275019004 container create b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.876274204 -0500 EST m=+0.304644999 container create 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.877124564 -0500 EST m=+0.305495454 container restart f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.813482805 -0500 EST m=+0.241853898 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.85064232 -0500 EST m=+0.279013516 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 07 09:15:05 managed-node2 systemd[1]: Started libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope - libcrun container. ░░ Subject: A start job for unit libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope has finished successfully. ░░ ░░ The job identifier is 3757. 
Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.932092861 -0500 EST m=+0.360463806 container init f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.934747592 -0500 EST m=+0.363118557 container start f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 07 09:15:05 managed-node2 kernel: veth3: entered allmulticast mode Jan 07 09:15:05 managed-node2 kernel: veth3: entered promiscuous mode Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Jan 07 09:15:05 managed-node2 NetworkManager[780]: [1736259305.9637] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Jan 07 09:15:05 managed-node2 NetworkManager[780]: [1736259305.9679] device (veth3): carrier: link connected Jan 07 09:15:05 managed-node2 (udev-worker)[41669]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:15:06 managed-node2 systemd[1]: Started libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope - libcrun container. ░░ Subject: A start job for unit libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope has finished successfully. ░░ ░░ The job identifier is 3764. Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.056224391 -0500 EST m=+0.484595266 container init 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.06016321 -0500 EST m=+0.488534068 container start 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:06 managed-node2 systemd[1]: Started libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope - libcrun container. ░░ Subject: A start job for unit libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope has finished successfully. ░░ ░░ The job identifier is 3771. 
Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.117341144 -0500 EST m=+0.545712103 container init b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.120032861 -0500 EST m=+0.548403708 container start b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 systemd[1]: Started libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope - libcrun container. ░░ Subject: A start job for unit libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope has finished successfully. ░░ ░░ The job identifier is 3778. Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.235011174 -0500 EST m=+1.663382056 container init 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.238016036 -0500 EST m=+1.666386966 container start 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.278508651 -0500 EST m=+1.706879540 pod start 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Volumes: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: wp-pv-claim Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Pod: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Containers: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b Jan 07 09:15:07 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3664. 
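With quadlet-demo.service up, the commands logged next (ls of /etc/containers/systemd, podman ps -a, podman volume ls, podman pod ps, a systemctl list-units grep, and repeated fetches of https://localhost:8000 into /run/out) verify the deployment end to end. A compressed sketch of that verification, reusing the parameters visible in the logged module invocations:

- name: List pods together with their containers
  ansible.builtin.command: podman pod ps --ctr-ids --ctr-names --ctr-status
  changed_when: false

- name: Confirm the quadlet-generated units are loaded
  ansible.builtin.shell: set -euo pipefail; systemctl list-units | grep quadlet
  changed_when: false

- name: Fetch the demo application on the published port 8000
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false   # the demo uses a self-signed certificate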
Jan 07 09:15:08 managed-node2 python3.12[42036]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:08 managed-node2 python3.12[42168]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42307]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42445]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42584]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:10 managed-node2 python3.12[42718]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:15 managed-node2 python3.12[42849]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:21 managed-node2 python3.12[42980]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:22 managed-node2 podman[43002]: 2025-01-07 09:15:22.792072206 -0500 EST m=+0.094428755 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, 
PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:15:26 managed-node2 python3.12[43127]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:32 managed-node2 python3.12[43258]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:37 managed-node2 python3.12[43389]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:43 managed-node2 python3.12[43520]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:43 managed-node2 python3.12[43651]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Tuesday 07 January 2025 09:15:43 -0500 (0:00:00.468) 0:02:16.592 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.038857", "end": "2025-01-07 09:15:43.981750", "rc": 0, "start": "2025-01-07 09:15:43.942893" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes aeb421c16034-service 
0479d7891e30 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 c22d23c0f7b4 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql f433509c99b1 localhost/podman-pause:5.3.1-1733097600 38 seconds ago Up 38 seconds a96f3a51b8d1-service 7cdb1162de3d localhost/podman-pause:5.3.1-1733097600 38 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 7cd86f9cd249-infra b6284e900e38 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 38 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress 8975483c7a2e quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 38 seconds ago Up 36 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check pods] ************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Tuesday 07 January 2025 09:15:44 -0500 (0:00:00.407) 0:02:17.000 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.039766", "end": "2025-01-07 09:15:44.391208", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:44.351442" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS 7cd86f9cd249 quadlet-demo Running 38 seconds ago 7cdb1162de3d 7cdb1162de3d,b6284e900e38,8975483c7a2e 7cd86f9cd249-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running 3457d4d54eec httpd3 Running 2 minutes ago 0479d7891e30 0479d7891e30,4bd23d3a55ea 3457d4d54eec-infra,httpd3-httpd3 running,running 5c56b3244e88 httpd2 Running 2 minutes ago b8b3f35398f1 b8b3f35398f1,153c7d559088 5c56b3244e88-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Tuesday 07 January 2025 09:15:44 -0500 (0:00:00.409) 0:02:17.409 ******* ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.015076", "end": "2025-01-07 09:15:44.775828", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:44.760752" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [LS] ********************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Tuesday 07 January 2025 09:15:44 -0500 (0:00:00.388) 0:02:17.798 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.005043", "end": "2025-01-07 09:15:45.166005", "failed_when_result": false, "rc": 0, "start": "2025-01-07 09:15:45.160962" } STDOUT: total 12 drwxr-xr-x. 5 root root 47 Dec 20 02:21 ../ lrwxrwxrwx. 1 root root 43 Dec 20 02:21 dbus.service -> /usr/lib/systemd/system/dbus-broker.service drwxr-xr-x. 
2 root root 32 Dec 20 02:21 getty.target.wants/ lrwxrwxrwx. 1 root root 37 Dec 20 02:21 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target drwxr-xr-x. 2 root root 48 Dec 20 02:22 network-online.target.wants/ lrwxrwxrwx. 1 root root 57 Dec 20 02:22 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 76 Dec 20 02:22 timers.target.wants/ drwxr-xr-x. 2 root root 38 Dec 20 02:22 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 Dec 20 02:25 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 31 Dec 20 02:37 remote-fs.target.wants/ drwxr-xr-x. 2 root root 119 Dec 20 02:38 cloud-init.target.wants/ drwxr-xr-x. 2 root root 4096 Dec 20 02:38 sysinit.target.wants/ drwxr-xr-x. 2 root root 113 Jan 7 09:09 sockets.target.wants/ lrwxrwxrwx. 1 root root 41 Jan 7 09:09 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 12 root root 4096 Jan 7 09:12 ./ drwxr-xr-x. 2 root root 162 Jan 7 09:13 default.target.wants/ drwxr-xr-x. 2 root root 4096 Jan 7 09:13 multi-user.target.wants/ TASK [Cleanup] ***************************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.405) 0:02:18.204 ******* included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.136) 0:02:18.340 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.088) 0:02:18.428 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.055) 0:02:18.484 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.036) 0:02:18.520 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.110) 0:02:18.631 ******* skipping: [managed-node2] => { "changed": false, "false_condition": 
"not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.036) 0:02:18.667 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.034) 0:02:18.702 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 07 January 2025 09:15:45 -0500 (0:00:00.074) 0:02:18.777 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 07 January 2025 09:15:46 -0500 (0:00:00.771) 0:02:19.548 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 07 January 2025 09:15:46 -0500 (0:00:00.034) 0:02:19.583 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 07 January 2025 09:15:46 -0500 
(0:00:00.037) 0:02:19.621 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 07 January 2025 09:15:46 -0500 (0:00:00.035) 0:02:19.656 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 07 January 2025 09:15:46 -0500 (0:00:00.033) 0:02:19.689 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 07 January 2025 09:15:46 -0500 (0:00:00.035) 0:02:19.725 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025108", "end": "2025-01-07 09:15:47.100582", "rc": 0, "start": "2025-01-07 09:15:47.075474" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.400) 0:02:20.125 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.038) 0:02:20.163 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.032) 0:02:20.196 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.042) 0:02:20.238 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.047) 
0:02:20.286 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.054) 0:02:20.341 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.054) 0:02:20.395 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.121) 0:02:20.517 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.039) 0:02:20.556 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.038) 0:02:20.595 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:15:47 -0500 (0:00:00.049) 0:02:20.645 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, 
"size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.393) 0:02:21.038 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.043) 0:02:21.082 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.043) 0:02:21.125 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.040) 0:02:21.166 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.035) 0:02:21.201 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.035) 0:02:21.237 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.035) 0:02:21.273 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.044) 0:02:21.317 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.034) 0:02:21.351 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.044) 0:02:21.396 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.076) 0:02:21.472 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.040) 0:02:21.513 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.039) 0:02:21.552 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.087) 0:02:21.639 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.106) 0:02:21.746 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.034) 0:02:21.780 ******* included: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.064) 0:02:21.845 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.040) 0:02:21.885 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Tuesday 07 January 2025 09:15:48 -0500 (0:00:00.047) 0:02:21.933 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.080) 0:02:22.014 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.043) 0:02:22.057 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.039) 0:02:22.096 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.036) 0:02:22.133 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.034) 0:02:22.167 ******* included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.110) 0:02:22.277 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.089) 0:02:22.367 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.054) 0:02:22.421 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.041) 0:02:22.463 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.042) 0:02:22.506 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.038) 0:02:22.545 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Tuesday 07 January 2025 09:15:49 -0500 (0:00:00.033) 0:02:22.578 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.819) 0:02:23.398 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.034) 0:02:23.433 ******* skipping: [managed-node2] => { 
"changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.035) 0:02:23.468 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.033) 0:02:23.502 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.034) 0:02:23.537 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Tuesday 07 January 2025 09:15:50 -0500 (0:00:00.040) 0:02:23.577 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:09:59 EST", "ActiveEnterTimestampMonotonic": "368272829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket sysinit.target dbus-broker.service system.slice polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:09:58 EST", "AssertTimestampMonotonic": "367306301", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "639943000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid 
cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:09:58 EST", "ConditionTimestampMonotonic": "367306297", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service ebtables.service shutdown.target ip6tables.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5002", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainHandoffTimestampMonotonic": "367337087", "ExecMainPID": "10983", "ExecMainStartTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainStartTimestampMonotonic": "367309888", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:09:58 EST", "InactiveExitTimestampMonotonic": "367310320", 
"InvocationID": "25e094e5f1f7407f91f3aca634286234", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10983", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2532077568", "MemoryCurrent": "33411072", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35258368", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:15:04 EST", "StateChangeTimestampMonotonic": "674079799", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Tuesday 07 January 2025 09:15:51 -0500 (0:00:00.565) 0:02:24.143 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:09:59 EST", "ActiveEnterTimestampMonotonic": "368272829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket sysinit.target dbus-broker.service system.slice polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:09:58 EST", "AssertTimestampMonotonic": "367306301", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "639943000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override 
cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:09:58 EST", "ConditionTimestampMonotonic": "367306297", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service ebtables.service shutdown.target ip6tables.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5002", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainHandoffTimestampMonotonic": "367337087", "ExecMainPID": "10983", "ExecMainStartTimestamp": "Tue 2025-01-07 09:09:58 EST", "ExecMainStartTimestampMonotonic": "367309888", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 
2025-01-07 09:09:58 EST", "InactiveExitTimestampMonotonic": "367310320", "InvocationID": "25e094e5f1f7407f91f3aca634286234", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10983", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2531295232", "MemoryCurrent": "33411072", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35258368", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", 
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:15:04 EST", "StateChangeTimestampMonotonic": "674079799", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Tuesday 07 January 2025 09:15:51 -0500 (0:00:00.569) 0:02:24.713 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Tuesday 07 January 2025 09:15:51 -0500 (0:00:00.043) 0:02:24.757 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Tuesday 07 January 2025 09:15:51 -0500 (0:00:00.035) 0:02:24.792 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Tuesday 07 January 2025 09:15:51 -0500 (0:00:00.033) 0:02:24.825 ******* ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Tuesday 07 January 2025 09:15:52 -0500 (0:00:01.003) 0:02:25.829 ******* skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Tuesday 07 January 2025 09:15:52 -0500 (0:00:00.051) 0:02:25.880 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Tuesday 07 January 2025 09:15:52 -0500 (0:00:00.042) 0:02:25.923 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.054) 0:02:25.977 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.063) 0:02:26.041 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.055) 0:02:26.096 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.057) 0:02:26.154 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.084) 0:02:26.239 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.057) 0:02:26.297 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.154) 0:02:26.451 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.050) 0:02:26.502 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.059) 0:02:26.561 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.169) 0:02:26.730 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.062) 0:02:26.793 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for 
managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.089) 0:02:26.882 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:15:53 -0500 (0:00:00.047) 0:02:26.930 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.050) 0:02:26.981 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.064) 0:02:27.046 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.041) 0:02:27.087 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.039) 0:02:27.126 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.035) 0:02:27.162 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.035) 0:02:27.197 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] 
********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.038) 0:02:27.236 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.042) 0:02:27.279 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.062) 0:02:27.342 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.057) 0:02:27.399 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.073) 0:02:27.473 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.060) 0:02:27.533 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.134) 0:02:27.668 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.037) 0:02:27.706 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:15:54 -0500 
(0:00:00.033) 0:02:27.739 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.032) 0:02:27.772 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:15:54 -0500 (0:00:00.033) 0:02:27.805 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.428) 0:02:28.234 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.044) 0:02:28.279 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.061) 0:02:28.340 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.039) 0:02:28.380 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.037) 0:02:28.417 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.054) 0:02:28.472 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.034) 0:02:28.507 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.036) 0:02:28.543 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.035) 0:02:28.579 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.037) 0:02:28.617 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.057) 0:02:28.674 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.046) 0:02:28.721 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.038) 0:02:28.760 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.041) 0:02:28.801 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.041) 0:02:28.842 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:15:55 -0500 (0:00:00.047) 0:02:28.890 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.123) 0:02:29.013 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.032) 0:02:29.046 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.040) 0:02:29.087 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.051) 0:02:29.139 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.055) 0:02:29.194 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.477) 0:02:29.671 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.064) 0:02:29.735 ******* included: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.102) 0:02:29.838 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:15:56 -0500 (0:00:00.060) 0:02:29.898 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.061) 0:02:29.959 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.077) 0:02:30.037 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.061) 0:02:30.099 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.057) 0:02:30.156 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.059) 0:02:30.216 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.057) 0:02:30.273 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.060) 0:02:30.333 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.058) 0:02:30.391 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.058) 0:02:30.450 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.061) 0:02:30.511 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.141) 0:02:30.652 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.069) 0:02:30.722 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.107) 0:02:30.830 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.055) 0:02:30.885 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:15:57 -0500 (0:00:00.055) 0:02:30.941 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.057) 0:02:30.998 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.054) 0:02:31.053 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.485) 0:02:31.538 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.055) 0:02:31.593 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.260) 0:02:31.853 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo 
network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:15:58 -0500 (0:00:00.071) 0:02:31.925 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.060) 0:02:31.986 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.060) 0:02:32.047 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.078) 0:02:32.125 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.100) 0:02:32.226 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.063) 0:02:32.289 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.068) 0:02:32.358 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
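For readability, the quadlet unit captured in __podman_quadlet_str in the "Set per-container variables part 0" task above decodes (with its \n escapes expanded) to the following quadlet-demo.kube file. This is a transcription of the value already shown in the output, nothing added:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml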
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:15:59 -0500 (0:00:00.182) 0:02:32.541 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.431) 0:02:32.973 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.047) 0:02:33.020 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.042) 0:02:33.062 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.060) 0:02:33.122 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.063) 0:02:33.186 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.064) 0:02:33.251 ******* skipping: [managed-node2] => { 
"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.062) 0:02:33.313 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.069) 0:02:33.383 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.114) 0:02:33.498 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.161) 0:02:33.659 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.066) 0:02:33.726 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.058) 0:02:33.784 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:16:00 -0500 (0:00:00.128) 0:02:33.913 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 
January 2025 09:16:01 -0500 (0:00:00.066) 0:02:33.980 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:16:01 -0500 (0:00:00.150) 0:02:34.131 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:16:01 -0500 (0:00:00.053) 0:02:34.184 ******* changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:15:07 EST", "ActiveEnterTimestampMonotonic": "676513184", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target quadlet-demo-mysql.service -.mount systemd-journald.socket quadlet-demo-network.service network-online.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:15:05 EST", "AssertTimestampMonotonic": "674777768", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "275403000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:15:05 EST", "ConditionTimestampMonotonic": "674777765", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo.service", "ControlGroupId": "12133", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", 
"ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "41657", "ExecMainStartTimestamp": "Tue 2025-01-07 09:15:07 EST", "ExecMainStartTimestampMonotonic": "676513140", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Tue 2025-01-07 09:15:05 EST] ; stop_time=[n/a] ; pid=41648 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Tue 2025-01-07 09:15:05 EST] ; stop_time=[n/a] ; pid=41648 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:15:05 EST", "InactiveExitTimestampMonotonic": "674780483", "InvocationID": "22683957dc8e4d09bca0b315f76519dc", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", 
"LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "41657", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2537553920", "MemoryCurrent": "2957312", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "25956352", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service -.mount quadlet-demo-network.service sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": 
"infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:15:07 EST", "StateChangeTimestampMonotonic": "676513184", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:16:02 -0500 (0:00:01.348) 0:02:35.532 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259303.8772469, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1736259303.883247, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 591397102, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259303.5492442, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "2739919704", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.685) 0:02:36.218 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.136) 0:02:36.354 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.365) 0:02:36.720 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified 
for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.055) 0:02:36.775 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.034) 0:02:36.810 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:16:03 -0500 (0:00:00.035) 0:02:36.846 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:04 -0500 (0:00:00.417) 0:02:37.263 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 January 2025 09:16:05 -0500 (0:00:00.810) 0:02:38.074 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:16:05 -0500 (0:00:00.062) 0:02:38.136 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:05 -0500 (0:00:00.084) 0:02:38.221 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:16:05 -0500 (0:00:00.039) 0:02:38.260 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.747727", "end": "2025-01-07 09:16:06.382928", "rc": 0, "start": "2025-01-07 09:16:05.635201" } STDOUT: fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d TASK [fedora.linux_system_roles.podman : Manage linger] 
************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:16:06 -0500 (0:00:01.178) 0:02:39.438 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:16:06 -0500 (0:00:00.109) 0:02:39.548 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:16:06 -0500 (0:00:00.051) 0:02:39.599 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:16:06 -0500 (0:00:00.061) 0:02:39.661 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:16:06 -0500 (0:00:00.068) 0:02:39.730 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031252", "end": "2025-01-07 09:16:07.167669", "rc": 0, "start": "2025-01-07 09:16:07.136417" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:16:07 -0500 (0:00:00.479) 0:02:40.209 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028895", "end": "2025-01-07 09:16:07.627850", "rc": 0, "start": "2025-01-07 09:16:07.598955" } STDOUT: local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:16:07 -0500 (0:00:00.446) 0:02:40.656 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035482", "end": "2025-01-07 09:16:08.059715", "rc": 0, "start": "2025-01-07 09:16:08.024233" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago 
Up 3 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 c22d23c0f7b4 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:16:08 -0500 (0:00:00.432) 0:02:41.089 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027431", "end": "2025-01-07 09:16:08.481223", "rc": 0, "start": "2025-01-07 09:16:08.453792" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:16:08 -0500 (0:00:00.418) 0:02:41.507 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:16:08 -0500 (0:00:00.437) 0:02:41.944 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 09:16:09 -0500 (0:00:00.463) 0:02:42.408 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service": { "name": "c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service", "source": "systemd", "state": "stopped", "status": "transient" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": 
"initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", 
"status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": 
"systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update 
quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:11 -0500 (0:00:02.502) 0:02:44.911 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.062) 0:02:44.973 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.186) 0:02:45.160 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.076) 0:02:45.237 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.068) 0:02:45.305 ******* ok: [managed-node2] => { 
"ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.095) 0:02:45.401 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.105) 0:02:45.507 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.050) 0:02:45.557 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.050) 0:02:45.608 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:16:12 -0500 (0:00:00.053) 0:02:45.661 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.407) 0:02:46.069 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.053) 0:02:46.122 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.061) 0:02:46.184 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.073) 0:02:46.258 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.080) 0:02:46.338 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.209) 0:02:46.547 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.049) 0:02:46.597 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.047) 0:02:46.644 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.050) 0:02:46.694 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.071) 0:02:46.765 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.069) 0:02:46.835 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:16:13 -0500 (0:00:00.073) 0:02:46.908 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:16:14 -0500 (0:00:00.168) 0:02:47.077 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:16:14 -0500 (0:00:00.072) 0:02:47.149 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:16:14 -0500 (0:00:00.163) 0:02:47.313 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:16:14 -0500 (0:00:00.118) 0:02:47.431 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:16:14 -0500 (0:00:00.126) 0:02:47.557 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259273.2469864, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1736259270.5579636, "dev": 51714, "device_type": 0, 
"executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 520093929, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259270.260961, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "3423616949", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:16:15 -0500 (0:00:00.541) 0:02:48.099 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:16:15 -0500 (0:00:00.138) 0:02:48.238 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:16:15 -0500 (0:00:00.492) 0:02:48.730 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:16:15 -0500 (0:00:00.070) 0:02:48.800 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:16:15 -0500 (0:00:00.089) 0:02:48.890 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:16:16 -0500 (0:00:00.086) 0:02:48.976 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:16 -0500 (0:00:00.438) 0:02:49.416 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 January 2025 09:16:17 -0500 (0:00:00.850) 0:02:50.266 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:16:17 -0500 (0:00:00.118) 0:02:50.384 ******* changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:18 -0500 (0:00:01.503) 0:02:51.888 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:16:18 -0500 (0:00:00.048) 0:02:51.937 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028632", "end": "2025-01-07 09:16:19.326258", "rc": 0, "start": "2025-01-07 09:16:19.297626" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:16:19 -0500 (0:00:00.462) 0:02:52.399 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:16:19 -0500 (0:00:00.112) 0:02:52.511 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:16:19 -0500 (0:00:00.060) 0:02:52.572 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:16:19 -0500 (0:00:00.069) 0:02:52.642 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:16:19 -0500 (0:00:00.057) 0:02:52.699 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031906", "end": "2025-01-07 09:16:20.155058", "rc": 0, "start": "2025-01-07 09:16:20.123152" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 6 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:16:20 -0500 (0:00:00.537) 0:02:53.237 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027903", "end": "2025-01-07 09:16:20.716024", "rc": 0, "start": "2025-01-07 09:16:20.688121" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:16:20 -0500 (0:00:00.563) 0:02:53.800 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034907", "end": "2025-01-07 09:16:21.266495", "rc": 0, "start": "2025-01-07 09:16:21.231588" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 c22d23c0f7b4 quay.io/linux-system-roles/mysql:5.6 mysqld 2 minutes ago Up 2 minutes (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:16:21 -0500 (0:00:00.704) 0:02:54.504 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027525", "end": "2025-01-07 09:16:22.014998", "rc": 0, "start": "2025-01-07 09:16:21.987473" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:16:22 -0500 (0:00:00.584) 0:02:55.088 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:16:22 -0500 (0:00:00.483) 0:02:55.572 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 09:16:23 -0500 (0:00:00.460) 0:02:56.032 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service": { "name": "c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service", "source": "systemd", "state": "stopped", "status": "transient" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": 
"cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": 
"dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:26 -0500 (0:00:03.140) 0:02:59.173 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:16:26 -0500 (0:00:00.116) 0:02:59.290 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": 
type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:16:26 -0500 (0:00:00.158) 0:02:59.448 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:16:26 -0500 (0:00:00.105) 0:02:59.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:26 -0500 (0:00:00.114) 0:02:59.669 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:16:26 -0500 (0:00:00.142) 0:02:59.812 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:16:27 -0500 (0:00:00.184) 0:02:59.997 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:16:27 -0500 (0:00:00.130) 0:03:00.127 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:16:27 -0500 (0:00:00.108) 0:03:00.236 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:16:27 -0500 (0:00:00.107) 0:03:00.344 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.653) 0:03:00.997 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.083) 0:03:01.080 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.076) 0:03:01.157 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.087) 0:03:01.244 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.066) 0:03:01.311 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.053) 0:03:01.365 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.063) 0:03:01.428 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.069) 0:03:01.498 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.070) 0:03:01.568 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.078) 0:03:01.646 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.053) 0:03:01.700 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.056) 0:03:01.756 ******* ok: [managed-node2] => { "ansible_facts": { 
"__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.108) 0:03:01.865 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:16:28 -0500 (0:00:00.065) 0:03:01.931 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:16:29 -0500 (0:00:00.141) 0:03:02.072 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:16:29 -0500 (0:00:00.043) 0:03:02.116 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:16:29 -0500 (0:00:00.152) 0:03:02.269 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259305.7312624, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1736259265.3909197, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 427819222, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259265.1099172, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "3648029456", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:16:29 -0500 (0:00:00.441) 0:03:02.710 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:16:29 -0500 (0:00:00.162) 0:03:02.873 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:16:30 -0500 (0:00:00.507) 0:03:03.380 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:16:30 -0500 (0:00:00.062) 0:03:03.443 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:16:30 -0500 (0:00:00.105) 0:03:03.549 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:16:30 -0500 (0:00:00.097) 0:03:03.646 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:31 -0500 (0:00:00.523) 0:03:04.169 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.871) 0:03:05.041 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.100) 0:03:05.142 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.106) 0:03:05.248 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": 
null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.083) 0:03:05.332 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.032900", "end": "2025-01-07 09:16:32.757179", "rc": 0, "start": "2025-01-07 09:16:32.724279" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.493) 0:03:05.826 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:16:32 -0500 (0:00:00.115) 0:03:05.941 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:16:33 -0500 (0:00:00.156) 0:03:06.098 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:16:33 -0500 (0:00:00.064) 0:03:06.163 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:16:33 -0500 (0:00:00.072) 0:03:06.236 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031596", "end": "2025-01-07 09:16:33.711257", "rc": 0, "start": "2025-01-07 09:16:33.679661" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 6 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:16:33 -0500 (0:00:00.573) 0:03:06.810 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.029533", "end": "2025-01-07 09:16:34.293875", "rc": 0, "start": "2025-01-07 09:16:34.264342" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:16:34 -0500 (0:00:00.530) 0:03:07.340 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.036306", "end": "2025-01-07 09:16:34.759240", "rc": 0, "start": "2025-01-07 09:16:34.722934" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 c22d23c0f7b4 quay.io/linux-system-roles/mysql:5.6 mysqld 2 minutes ago Up 2 minutes (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:16:34 -0500 (0:00:00.469) 0:03:07.810 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027911", "end": "2025-01-07 09:16:35.236859", "rc": 0, "start": "2025-01-07 09:16:35.208948" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:16:35 -0500 (0:00:00.478) 0:03:08.289 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:16:35 -0500 (0:00:00.478) 0:03:08.767 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 09:16:36 -0500 (0:00:00.546) 0:03:09.314 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service": { "name": "c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service", "source": "systemd", "state": "stopped", "status": "failed" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, 
"debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": 
"hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": 
"netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", 
"source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": 
"systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:38 -0500 (0:00:02.265) 0:03:11.579 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:16:38 -0500 (0:00:00.096) 0:03:11.675 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.304) 0:03:11.980 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.098) 0:03:12.079 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.068) 0:03:12.148 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.092) 0:03:12.240 ******* included: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.337) 0:03:12.577 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.124) 0:03:12.702 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.107) 0:03:12.809 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:16:39 -0500 (0:00:00.108) 0:03:12.918 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.484) 0:03:13.402 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.096) 0:03:13.499 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.091) 0:03:13.591 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.066) 0:03:13.657 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.082) 0:03:13.739 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.086) 0:03:13.825 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:16:40 -0500 (0:00:00.065) 0:03:13.891 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.093) 0:03:13.985 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.091) 0:03:14.077 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.109) 0:03:14.186 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.124) 0:03:14.311 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.089) 0:03:14.400 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.357) 0:03:14.758 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:16:41 -0500 (0:00:00.087) 0:03:14.845 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:16:42 -0500 (0:00:00.163) 0:03:15.009 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:16:42 -0500 (0:00:00.081) 0:03:15.090 ******* changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:14:21 EST", "ActiveEnterTimestampMonotonic": "630426456", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target tmp.mount quadlet-demo-network.service systemd-journald.socket system.slice -.mount network-online.target quadlet-demo-mysql-volume.service sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:14:20 EST", "AssertTimestampMonotonic": "630139162", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", 
"CPUShares": "[not set]", "CPUUsageNSec": "2763962000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:14:20 EST", "ConditionTimestampMonotonic": "630139158", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo-mysql.service", "ControlGroupId": "11023", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "38167", "ExecMainStartTimestamp": "Tue 2025-01-07 09:14:21 EST", "ExecMainStartTimestampMonotonic": "630426418", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v 
-f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:14:20 EST", "InactiveExitTimestampMonotonic": "630148078", "InvocationID": "c5d6bbddf6db4e3c804f11aecc8b55d0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "38167", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2638934016", "MemoryCurrent": "601382912", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "643874816", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice quadlet-demo-network.service quadlet-demo-mysql-volume.service sysinit.target -.mount", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:14:21 EST", "StateChangeTimestampMonotonic": "630426456", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", 
"UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:16:44 -0500 (0:00:02.243) 0:03:17.333 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259259.3758686, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1736259259.3818686, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 272629994, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259259.071866, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "2619164601", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:16:44 -0500 (0:00:00.520) 0:03:17.853 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:16:45 -0500 (0:00:00.165) 0:03:18.018 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:16:45 -0500 (0:00:00.486) 0:03:18.505 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:16:45 -0500 (0:00:00.116) 0:03:18.621 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:16:45 -0500 (0:00:00.126) 0:03:18.748 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet 
file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:16:45 -0500 (0:00:00.082) 0:03:18.830 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:46 -0500 (0:00:00.452) 0:03:19.283 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 January 2025 09:16:47 -0500 (0:00:00.804) 0:03:20.087 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:16:47 -0500 (0:00:00.488) 0:03:20.576 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:47 -0500 (0:00:00.084) 0:03:20.660 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:16:47 -0500 (0:00:00.164) 0:03:20.824 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.247867", "end": "2025-01-07 09:16:48.439706", "rc": 0, "start": "2025-01-07 09:16:48.191839" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:16:48 -0500 (0:00:00.638) 0:03:21.463 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:16:48 -0500 (0:00:00.066) 0:03:21.529 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:16:48 -0500 (0:00:00.038) 0:03:21.568 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:16:48 -0500 (0:00:00.037) 0:03:21.605 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:16:48 -0500 (0:00:00.038) 0:03:21.643 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030500", "end": "2025-01-07 09:16:49.032586", "rc": 0, "start": "2025-01-07 09:16:49.002086" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:16:49 -0500 (0:00:00.413) 0:03:22.057 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.029472", "end": "2025-01-07 09:16:49.446406", "rc": 0, "start": "2025-01-07 09:16:49.416934" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:16:49 -0500 (0:00:00.411) 0:03:22.468 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034119", "end": "2025-01-07 09:16:49.866641", "rc": 0, "start": "2025-01-07 09:16:49.832522" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:16:49 -0500 (0:00:00.422) 0:03:22.890 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027548", "end": "2025-01-07 09:16:50.277135", "rc": 0, "start": "2025-01-07 09:16:50.249587" } STDOUT: podman 
podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:16:50 -0500 (0:00:00.410) 0:03:23.301 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:16:50 -0500 (0:00:00.417) 0:03:23.718 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 09:16:51 -0500 (0:00:00.434) 0:03:24.153 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": 
"cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": 
"dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": 
"systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": 
"systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:53 -0500 (0:00:02.043) 0:03:26.196 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.043) 0:03:26.240 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.147) 0:03:26.387 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.047) 0:03:26.435 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.041) 0:03:26.476 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.058) 0:03:26.534 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.071) 0:03:26.606 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.045) 0:03:26.651 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.044) 0:03:26.696 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:16:53 -0500 (0:00:00.053) 0:03:26.749 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": 
false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.385) 0:03:27.135 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.039) 0:03:27.175 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.040) 0:03:27.215 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.038) 0:03:27.254 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.038) 0:03:27.292 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.039) 0:03:27.332 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.042) 0:03:27.375 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.061) 0:03:27.436 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.167) 0:03:27.604 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.102) 0:03:27.707 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.068) 0:03:27.776 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.042) 0:03:27.818 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:16:54 -0500 (0:00:00.108) 0:03:27.927 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:16:55 -0500 (0:00:00.046) 0:03:27.974 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:16:55 -0500 (0:00:00.089) 0:03:28.063 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:16:55 -0500 (0:00:00.036) 0:03:28.100 ******* changed: [managed-node2] => { "changed": true, "enabled": 
false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:14:07 EST", "ActiveEnterTimestampMonotonic": "616275932", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "network-online.target sysinit.target basic.target system.slice systemd-journald.socket -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:14:06 EST", "AssertTimestampMonotonic": "616228030", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:14:06 EST", "ConditionTimestampMonotonic": "616228027", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Tue 2025-01-07 09:14:07 EST", "ExecMainExitTimestampMonotonic": "616275751", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:14:07 EST", "ExecMainHandoffTimestampMonotonic": "616238408", "ExecMainPID": "36834", "ExecMainStartTimestamp": "Tue 2025-01-07 09:14:06 EST", "ExecMainStartTimestampMonotonic": "616228890", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", 
"FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:14:06 EST", "InactiveExitTimestampMonotonic": "616229349", "InvocationID": "0c48845aa0364f0795c076a5e0af14ff", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3139661824", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", 
"ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:14:07 EST", "StateChangeTimestampMonotonic": "616275932", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:16:55 -0500 (0:00:00.828) 0:03:28.928 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259245.5237508, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1736259245.5297508, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 662700303, "isblk": false, "ischr": 
false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259245.188748, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "2254160868", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:16:56 -0500 (0:00:00.397) 0:03:29.326 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:16:56 -0500 (0:00:00.070) 0:03:29.396 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:16:56 -0500 (0:00:00.372) 0:03:29.769 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:16:56 -0500 (0:00:00.059) 0:03:29.828 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:16:56 -0500 (0:00:00.060) 0:03:29.889 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:16:57 -0500 (0:00:00.074) 0:03:29.963 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:16:57 -0500 (0:00:00.513) 0:03:30.476 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 
January 2025 09:16:58 -0500 (0:00:00.805) 0:03:31.282 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:16:58 -0500 (0:00:00.424) 0:03:31.707 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:16:58 -0500 (0:00:00.051) 0:03:31.758 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:16:58 -0500 (0:00:00.040) 0:03:31.799 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028355", "end": "2025-01-07 09:16:59.184345", "rc": 0, "start": "2025-01-07 09:16:59.155990" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.418) 0:03:32.217 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.099) 0:03:32.317 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.044) 0:03:32.361 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.044) 0:03:32.405 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.043) 0:03:32.448 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", 
"-n" ], "delta": "0:00:00.031247", "end": "2025-01-07 09:16:59.862790", "rc": 0, "start": "2025-01-07 09:16:59.831543" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:16:59 -0500 (0:00:00.500) 0:03:32.948 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028274", "end": "2025-01-07 09:17:00.426459", "rc": 0, "start": "2025-01-07 09:17:00.398185" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:17:00 -0500 (0:00:00.499) 0:03:33.448 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032476", "end": "2025-01-07 09:17:00.846569", "rc": 0, "start": "2025-01-07 09:17:00.814093" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 4 minutes ago Up 4 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 4 minutes ago Up 4 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 4 minutes ago Up 4 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:17:00 -0500 (0:00:00.471) 0:03:33.920 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.026408", "end": "2025-01-07 09:17:01.340432", "rc": 0, "start": "2025-01-07 09:17:01.314024" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:17:01 -0500 (0:00:00.497) 0:03:34.417 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:17:01 -0500 (0:00:00.491) 0:03:34.909 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 
09:17:02 -0500 (0:00:00.527) 0:03:35.437 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": 
"active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": 
"active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, 
"modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { 
"name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": 
"stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": 
"stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:17:04 -0500 (0:00:01.965) 0:03:37.402 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.037) 0:03:37.440 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.052) 0:03:37.492 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.049) 0:03:37.541 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.041) 0:03:37.583 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.055) 0:03:37.638 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.073) 0:03:37.711 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.043) 0:03:37.754 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.044) 0:03:37.799 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 07 January 2025 09:17:04 -0500 (0:00:00.052) 0:03:37.852 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736258992.2408626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736258966.2956612, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "117023548", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.400) 0:03:38.253 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.039) 0:03:38.292 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.039) 0:03:38.332 ******* skipping: [managed-node2] => { "changed": 
false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.051) 0:03:38.384 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.157) 0:03:38.541 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.095) 0:03:38.637 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.069) 0:03:38.706 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.089) 0:03:38.796 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 07 January 2025 09:17:05 -0500 (0:00:00.073) 0:03:38.870 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.108) 0:03:38.978 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 07 January 2025 09:17:06 -0500 
(0:00:00.084) 0:03:39.062 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.066) 0:03:39.128 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.206) 0:03:39.335 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.073) 0:03:39.408 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.182) 0:03:39.591 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 07 January 2025 09:17:06 -0500 (0:00:00.093) 0:03:39.684 ******* changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2025-01-07 09:14:01 EST", "ActiveEnterTimestampMonotonic": "610830020", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "network-online.target basic.target sysinit.target systemd-journald.socket -.mount system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-01-07 09:14:01 EST", "AssertTimestampMonotonic": "610786783", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin 
cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-01-07 09:14:01 EST", "ConditionTimestampMonotonic": "610786779", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Tue 2025-01-07 09:14:01 EST", "ExecMainExitTimestampMonotonic": "610829818", "ExecMainHandoffTimestamp": "Tue 2025-01-07 09:14:01 EST", "ExecMainHandoffTimestampMonotonic": "610799599", "ExecMainPID": "36005", "ExecMainStartTimestamp": "Tue 2025-01-07 09:14:01 EST", "ExecMainStartTimestampMonotonic": "610787644", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2025-01-07 09:14:01 EST", "InactiveExitTimestampMonotonic": "610788089", "InvocationID": "3db89486a5404620a310c834c8bc19b4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": 
"infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3116052480", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", 
"SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Tue 2025-01-07 09:14:01 EST", "StateChangeTimestampMonotonic": "610830020", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Tuesday 07 January 2025 09:17:07 -0500 (0:00:00.900) 0:03:40.584 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736259240.0417047, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1736259240.0477047, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 616562899, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736259239.664702, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "3943354933", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.453) 0:03:41.038 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.135) 0:03:41.174 ******* ok: [managed-node2] => { 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.515) 0:03:41.689 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.068) 0:03:41.758 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.048) 0:03:41.807 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Tuesday 07 January 2025 09:17:08 -0500 (0:00:00.047) 0:03:41.855 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Tuesday 07 January 2025 09:17:09 -0500 (0:00:00.396) 0:03:42.251 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Tuesday 07 January 2025 09:17:10 -0500 (0:00:00.773) 0:03:43.025 ******* changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Tuesday 07 January 2025 09:17:10 -0500 (0:00:00.464) 0:03:43.490 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Tuesday 07 January 2025 09:17:10 -0500 (0:00:00.072) 0:03:43.562 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: 
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Tuesday 07 January 2025 09:17:10 -0500 (0:00:00.058) 0:03:43.620 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.027372", "end": "2025-01-07 09:17:11.014737", "rc": 0, "start": "2025-01-07 09:17:10.987365" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.416) 0:03:44.036 ******* included: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.069) 0:03:44.106 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.036) 0:03:44.143 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.038) 0:03:44.181 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.044) 0:03:44.226 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030336", "end": "2025-01-07 09:17:11.626259", "rc": 0, "start": "2025-01-07 09:17:11.595923" } STDOUT: localhost/podman-pause 5.3.1-1733097600 df073f11d00f 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Tuesday 07 January 2025 09:17:11 -0500 (0:00:00.442) 0:03:44.669 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027724", "end": "2025-01-07 09:17:12.122166", "rc": 0, "start": "2025-01-07 09:17:12.094442" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Tuesday 07 January 2025 09:17:12 -0500 (0:00:00.613) 0:03:45.283 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": 
"0:00:00.033890", "end": "2025-01-07 09:17:12.706955", "rc": 0, "start": "2025-01-07 09:17:12.673065" } STDOUT: 7b54a3800ffc localhost/podman-pause:5.3.1-1733097600 4 minutes ago Up 4 minutes 5d7db1ec4fb5-service b8b3f35398f1 localhost/podman-pause:5.3.1-1733097600 4 minutes ago Up 4 minutes 0.0.0.0:15002->80/tcp 5c56b3244e88-infra 153c7d559088 quay.io/libpod/testimage:20210610 4 minutes ago Up 4 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 c8ea84d4fdcd localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes aeb421c16034-service 0479d7891e30 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 3457d4d54eec-infra 4bd23d3a55ea quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Tuesday 07 January 2025 09:17:12 -0500 (0:00:00.457) 0:03:45.740 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027346", "end": "2025-01-07 09:17:13.164884", "rc": 0, "start": "2025-01-07 09:17:13.137538" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Tuesday 07 January 2025 09:17:13 -0500 (0:00:00.491) 0:03:46.232 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Tuesday 07 January 2025 09:17:13 -0500 (0:00:00.502) 0:03:46.735 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Tuesday 07 January 2025 09:17:14 -0500 (0:00:00.475) 0:03:47.210 ******* ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { 
"name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": 
{ "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" 
}, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": 
"systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { 
"name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 07 January 2025 09:17:16 -0500 (0:00:01.938) 0:03:49.149 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Tuesday 07 January 2025 09:17:16 -0500 
(0:00:00.058) 0:03:49.207 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Tuesday 07 January 2025 09:17:16 -0500 (0:00:00.045) 0:03:49.252 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Tuesday 07 January 2025 09:17:16 -0500 (0:00:00.044) 0:03:49.297 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Tuesday 07 January 2025 09:17:16 -0500 (0:00:00.070) 0:03:49.368 ******* fatal: [managed-node2]: FAILED! => { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Tuesday 07 January 2025 09:17:16 -0500 (0:00:00.044) 0:03:49.412 ******* ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.375111", "end": "2025-01-07 09:17:17.133736", "rc": 0, "start": "2025-01-07 09:17:16.758625" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet + : TASK [Get journald] ************************************************************ task path: /tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Tuesday 07 January 2025 09:17:17 -0500 (0:00:00.744) 0:03:50.157 ******* fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.029885", "end": "2025-01-07 09:17:17.536061", "failed_when_result": true, "rc": 0, "start": "2025-01-07 09:17:17.506176" } STDOUT: Jan 07 09:12:13 managed-node2 python3.12[23327]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:13 managed-node2 sudo[23500]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ijejrcautuquyjfzlowgygnnvlvpkaqf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259133.298375-15090-71630362262234/AnsiballZ_podman_image.py' Jan 07 09:12:13 managed-node2 sudo[23500]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23500) opened. Jan 07 09:12:13 managed-node2 sudo[23500]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:13 managed-node2 systemd[23047]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 07 09:12:13 managed-node2 systemd[23047]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:12:13 managed-node2 systemd[23047]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 07 09:12:13 managed-node2 dbus-broker-launch[23523]: Ready Jan 07 09:12:13 managed-node2 systemd[23047]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 07 09:12:13 managed-node2 systemd[23047]: Started podman-23511.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 07 09:12:13 managed-node2 systemd[23047]: Started podman-pause-36ce1bf5.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. 
Jan 07 09:12:14 managed-node2 systemd[23047]: Started podman-23527.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 07 09:12:15 managed-node2 systemd[23047]: Started podman-23552.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 07 09:12:15 managed-node2 sudo[23500]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:16 managed-node2 python3.12[23690]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:16 managed-node2 python3.12[23821]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:17 managed-node2 python3.12[23952]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:12:17 managed-node2 python3.12[24057]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259136.8938768-15263-66515330324667/.source.yml _original_basename=.7ihyk1_p follow=False checksum=906ffc495c17b7b3c2713751534afab1dd238226 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:17 managed-node2 sudo[24230]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wteuofgsqkravhujeqqglijpjhgodjqi ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259137.6223898-15310-111121950013799/AnsiballZ_podman_play.py' Jan 07 09:12:17 managed-node2 sudo[24230]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24230) opened. 
Jan 07 09:12:17 managed-node2 sudo[24230]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:17 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:12:18 managed-node2 systemd[23047]: Started podman-24240.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 07 09:12:18 managed-node2 systemd[23047]: Created slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice - cgroup user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 07 09:12:18 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 07 09:12:18 managed-node2 systemd[23047]: Started rootless-netns-177d061c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 07 09:12:18 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:18 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:18 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:18 managed-node2 systemd[23047]: Started run-r1a32890a0caa4de8a1ca84865a041cdc.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
Jan 07 09:12:18 managed-node2 aardvark-dns[24322]: starting aardvark on a child with pid 24323 Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Successfully parsed config Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Listen v6 ip {} Jan 07 09:12:18 managed-node2 aardvark-dns[24323]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 07 09:12:18 managed-node2 conmon[24338]: conmon bce005bb46a5e13ccfec : failed to write to /proc/self/oom_score_adj: Permission denied Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-conmon-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : terminal_ctrl_fd: 14 Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : winsz read side: 17, winsz write side: 18 Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. Jan 07 09:12:18 managed-node2 conmon[24339]: conmon bce005bb46a5e13ccfec : container PID: 24341 Jan 07 09:12:18 managed-node2 conmon[24343]: conmon 601607edc0bad5ac48b6 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : terminal_ctrl_fd: 13 Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : winsz read side: 16, winsz write side: 17 Jan 07 09:12:18 managed-node2 systemd[23047]: Started libpod-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. 
Jan 07 09:12:18 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : container PID: 24347 Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Container: 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-07T09:12:18-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-07T09:12:18-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-07T09:12:18-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-07T09:12:18-05:00" level=info msg="Using sqlite as database backend" time="2025-01-07T09:12:18-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-07T09:12:18-05:00" level=debug msg="Using graph driver overlay" time="2025-01-07T09:12:18-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-07T09:12:18-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-07T09:12:18-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-07T09:12:18-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-07T09:12:18-05:00" level=debug msg="Using transient store: false" time="2025-01-07T09:12:18-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-07T09:12:18-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-07T09:12:18-05:00" level=debug msg="Initializing event backend file" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime 
crun-wasm: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-07T09:12:18-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-07T09:12:18-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-07T09:12:18-05:00" level=debug msg="found free device name podman1" time="2025-01-07T09:12:18-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c4,c889\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container ID: dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9" time="2025-01-07T09:12:18-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-07T09:12:18-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-07T09:12:18-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\"" time="2025-01-07T09:12:18-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-07T09:12:18-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-07T09:12:18-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\"" time="2025-01-07T09:12:18-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-07T09:12:18-05:00" level=debug msg="layer list: [\"95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c\"]" time="2025-01-07T09:12:18-05:00" level=debug msg="using \"/var/tmp/buildah3006773201\" to hold temporary data" time="2025-01-07T09:12:18-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c/diff" time="2025-01-07T09:12:18-05:00" level=debug msg="layer \"95810cdc96394eb6d5d7ed02b34581c75ca4d922a33b130ac315144166ba184c\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-07T09:12:18-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-07T14:12:18.225948604Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-07T14:12:18.195886547Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-07T14:12:18.228836521Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-07T09:12:18-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-07T14:12:18.225948604Z\",\"container\":\"dfc458c14bc3d2dd3ad02df721c87c4ae8579bb15ec61b416f3dcd606503d0b9\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-07T14:12:18.195886547Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-07T14:12:18.228836521Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:cb1e3c19c4cf1aab48629f1b13656c0f68172e1cb398ae5aad1fcfaf7f709006\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-07T09:12:18-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-07T09:12:18-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-07T09:12:18-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-07T09:12:18-05:00" level=debug msg="Overall: allowed" time="2025-01-07T09:12:18-05:00" level=debug msg="start reading config" time="2025-01-07T09:12:18-05:00" level=debug msg="finished reading config" time="2025-01-07T09:12:18-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-07T09:12:18-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-07T09:12:18-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-07T09:12:18-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-07T09:12:18-05:00" level=debug msg="No compression detected" time="2025-01-07T09:12:18-05:00" level=debug msg="Using original blob without modification" time="2025-01-07T09:12:18-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-07T09:12:18-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-07T09:12:18-05:00" level=debug msg="No compression detected" time="2025-01-07T09:12:18-05:00" level=debug msg="Compression change for blob sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-07T09:12:18-05:00" level=debug msg="Using original blob without modification" time="2025-01-07T09:12:18-05:00" level=debug msg="setting image creation date to 2025-01-07 14:12:18.225948604 +0000 UTC" time="2025-01-07T09:12:18-05:00" level=debug msg="created new image ID \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\" with metadata \"{}\"" time="2025-01-07T09:12:18-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-07T09:12:18-05:00" level=debug msg="printing final image id \"5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612" time="2025-01-07T09:12:18-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:18-05:00" level=debug msg="setting container name 7ee39644f44c-infra" time="2025-01-07T09:12:18-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network cafde5e4b8d539fd25d14051143b0ea2f2ee26225bafd0e8d7a632b06c900ecb bridge podman1 2025-01-07 09:12:18.053358397 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-07T09:12:18-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-07T09:12:18-05:00" level=debug msg="Allocated lock 1 for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:5695e5c8db6bfee3f15e4db1f738ee3d7b797567d0f04ac6a3333ae36b2d1612\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" has run directory \"/run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:18-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:18-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:18-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:18-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:18-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-07T09:12:18-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-07T09:12:18-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:18-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /proc" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /sys" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-07T09:12:18-05:00" level=debug msg="Allocated lock 2 for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" has run directory \"/run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Strongconnecting node bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="Pushed bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 onto stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Finishing node bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0. Popped bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 off stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Strongconnecting node 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="Pushed 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a onto stack" time="2025-01-07T09:12:18-05:00" level=debug msg="Finishing node 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a. 
Popped 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a off stack" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/PCNV6S4VBI3WTFCEGML2VVFLBY,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c515,c561\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-c7bfc11a-f7f2-ab65-e293-fb6f673f11a0 for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="Mounted container \"bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created root filesystem for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 at /home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged" time="2025-01-07T09:12:18-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-07T09:12:18-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-07T09:12:18-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_cafde5e4_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "de:0a:de:23:41:22", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Starting parent driver\"\ntime=\"2025-01-07T09:12:18-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport3024916983/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport3024916983/.bp.sock]\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport: time=\"2025-01-07T09:12:18-05:00\" level=info msg=Ready\n" time="2025-01-07T09:12:18-05:00" level=debug msg="rootlessport is ready" time="2025-01-07T09:12:18-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:18-05:00" level=debug msg="Setting Cgroups for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 to user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice:libpod:bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:18-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/2d547bdbebb50560278983b8d7b53c59ce086159f616cc1759969373b402b864/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created OCI spec for container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/config.json" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:18-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 -u bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata -p /run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/pidfile -n 7ee39644f44c-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0]" 
time="2025-01-07T09:12:18-05:00" level=info msg="Running conmon under slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice and unitName libpod-conmon-bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-07T09:12:18-05:00" level=debug msg="Received: 24341" time="2025-01-07T09:12:18-05:00" level=info msg="Got Conmon PID as 24339" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 in OCI runtime" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-07T09:12:18-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-07T09:12:18-05:00" level=debug msg="Starting container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0 with command [/catatonit -P]" time="2025-01-07T09:12:18-05:00" level=debug msg="Started container bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0" time="2025-01-07T09:12:18-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/TK3CDRREGBBMRE5LHFO2QTPNJ2,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c515,c561\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Mounted container \"601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/merged\"" time="2025-01-07T09:12:18-05:00" level=debug msg="Created root filesystem for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a at /home/podman_basic_user/.local/share/containers/storage/overlay/a30b32a7353f58f524ea4500fa87625888bbea4ca81c6c02ea01fc00360b92b4/merged" time="2025-01-07T09:12:18-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:18-05:00" level=debug msg="Setting Cgroups for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a to user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice:libpod:601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:18-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-07T09:12:18-05:00" level=debug msg="Created OCI spec for container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/config.json" time="2025-01-07T09:12:18-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice for parent user.slice and name libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3" time="2025-01-07T09:12:18-05:00" level=debug msg="Created 
cgroup user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice" time="2025-01-07T09:12:18-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:18-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a -u 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata -p /run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a]" time="2025-01-07T09:12:18-05:00" level=info msg="Running conmon under slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice and unitName libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-07T09:12:18-05:00" level=debug msg="Received: 24347" time="2025-01-07T09:12:18-05:00" level=info msg="Got Conmon PID as 24344" time="2025-01-07T09:12:18-05:00" level=debug msg="Created container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a in OCI runtime" time="2025-01-07T09:12:18-05:00" level=debug msg="Starting container 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-07T09:12:18-05:00" level=debug msg="Started container 
601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a" time="2025-01-07T09:12:18-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-07T09:12:18-05:00" level=debug msg="Shutting down engines" time="2025-01-07T09:12:18-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24240 Jan 07 09:12:18 managed-node2 python3.12[24233]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 07 09:12:18 managed-node2 sudo[24230]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:19 managed-node2 sudo[24521]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bqytykouajhgmdahwlaftzhdelfjixfe ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259138.979414-15371-200801400085149/AnsiballZ_systemd.py' Jan 07 09:12:19 managed-node2 sudo[24521]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24521) opened. Jan 07 09:12:19 managed-node2 sudo[24521]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:19 managed-node2 python3.12[24524]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:12:19 managed-node2 systemd[23047]: Reload requested from client PID 24525 ('systemctl')... Jan 07 09:12:19 managed-node2 systemd[23047]: Reloading... Jan 07 09:12:19 managed-node2 systemd[23047]: Reloading finished in 44 ms. Jan 07 09:12:19 managed-node2 sudo[24521]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:19 managed-node2 sudo[24707]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efqzexmaanhcvrywxycthvkoeunbtoqc ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259139.7113974-15414-201876225858933/AnsiballZ_systemd.py' Jan 07 09:12:19 managed-node2 sudo[24707]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24707) opened. Jan 07 09:12:19 managed-node2 sudo[24707]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:20 managed-node2 python3.12[24710]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:12:20 managed-node2 systemd[23047]: Reload requested from client PID 24713 ('systemctl')... Jan 07 09:12:20 managed-node2 systemd[23047]: Reloading... Jan 07 09:12:20 managed-node2 systemd[23047]: Reloading finished in 44 ms. Jan 07 09:12:20 managed-node2 sudo[24707]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:20 managed-node2 sudo[24895]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jjdbmfgtkwigowclxuedzcwfckspdogg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259140.495615-15451-280969617872416/AnsiballZ_systemd.py' Jan 07 09:12:20 managed-node2 sudo[24895]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24895) opened. 
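For orientation, the kube file played above (/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml) is not reproduced in this log, but the debug messages let us sketch roughly what it must contain: a pod named httpd1 with a container httpd1 (podman names it httpd1-httpd1), image quay.io/libpod/testimage:20210610, command /bin/busybox-extras httpd -f -p 80, working directory /var/www, and host port 15001 mapped to container port 80. The following is only a reconstruction under those assumptions; the volume name and host path are placeholders, inferred from the 'Workdir "/var/www" resolved to a volume or mount' message.

    apiVersion: v1
    kind: Pod
    metadata:
      name: httpd1
    spec:
      containers:
        - name: httpd1
          image: quay.io/libpod/testimage:20210610
          command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
          workingDir: /var/www
          ports:
            - containerPort: 80
              hostPort: 15001
          volumeMounts:
            - name: www                      # placeholder name; the log only shows that /var/www is a mount
              mountPath: /var/www
      volumes:
        - name: www
          hostPath:
            path: /path/on/host/for/httpd1   # placeholder; the real host path is not visible in this excerpt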
Jan 07 09:12:20 managed-node2 sudo[24895]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:12:20 managed-node2 python3.12[24898]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:12:20 managed-node2 systemd[23047]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Jan 07 09:12:20 managed-node2 systemd[23047]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Received SIGHUP Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Successfully parsed config Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Listen v4 ip {} Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: Listen v6 ip {} Jan 07 09:12:21 managed-node2 aardvark-dns[24323]: No configuration found stopping the sever Jan 07 09:12:21 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:21 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 07 09:12:21 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 07 09:12:21 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0)" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Initializing event backend file" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 
07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only bce005bb46a5e13ccfecf7373d40775f878fcdddc0a8a4651d9fd838ef5979d0)" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:21 managed-node2 /usr/bin/podman[24911]: time="2025-01-07T09:12:21-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24911 Jan 07 09:12:31 managed-node2 podman[24901]: time="2025-01-07T09:12:31-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jan 07 09:12:31 managed-node2 conmon[24344]: conmon 601607edc0bad5ac48b6 : container 24347 exited with status 137 Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a)" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 07 09:12:31 managed-node2 /usr/bin/podman[24930]: time="2025-01-07T09:12:31-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:31 managed-node2 systemd[23047]: Stopping libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Jan 07 09:12:31 managed-node2 systemd[23047]: Stopped libpod-conmon-601607edc0bad5ac48b6a946a7023c40d08491d57765f188c572ad31abeeb41a.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jan 07 09:12:31 managed-node2 systemd[23047]: Removed slice user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice - cgroup user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Jan 07 09:12:31 managed-node2 systemd[23047]: user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3.slice: No such file or directory Jan 07 09:12:31 managed-node2 podman[24901]: Pods stopped: Jan 07 09:12:31 managed-node2 podman[24901]: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Jan 07 09:12:31 managed-node2 podman[24901]: Pods removed: Jan 07 09:12:31 managed-node2 podman[24901]: 7ee39644f44c212ed8c9468fbb32c7003e71e8cf99bc08bae81d105ea55f94f3 Jan 07 09:12:31 managed-node2 podman[24901]: Secrets removed: Jan 07 09:12:31 managed-node2 podman[24901]: Volumes removed: Jan 07 09:12:31 managed-node2 systemd[23047]: Created slice user-libpod_pod_6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185.slice - cgroup user-libpod_pod_6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. 
Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-c63a6bf4648719515a4699103a0db4aec20870161a7b837e1f996172cb7c0a19.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jan 07 09:12:31 managed-node2 systemd[23047]: Started rootless-netns-6598f252.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:31 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:31 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:31 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:31 managed-node2 systemd[23047]: Started run-r263f95b073e04414b82b4f3cc94fc83d.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-845db18e0a114d44bb99b856f4c82915865862ec0a3de9e09c1e3addba107323.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jan 07 09:12:31 managed-node2 systemd[23047]: Started libpod-23f6f23a68db521dffb7a208e5841f2c5ef904642c86114498c6544bb891d417.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 07 09:12:31 managed-node2 podman[24901]: Pod: Jan 07 09:12:31 managed-node2 podman[24901]: 6fc0f10f49f3252eab0b8d03ec2b15b5707dcb0fe0cd0847c0f6dd05fdbde185 Jan 07 09:12:31 managed-node2 podman[24901]: Container: Jan 07 09:12:31 managed-node2 podman[24901]: 23f6f23a68db521dffb7a208e5841f2c5ef904642c86114498c6544bb891d417 Jan 07 09:12:31 managed-node2 systemd[23047]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
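The rootless unit management logged above (ansible-systemd calls with scope=user for daemon_reload, enabled=True, and state=started, each run over sudo with XDG_RUNTIME_DIR=/run/user/3001) corresponds roughly to tasks of the following shape. This is a sketch of the pattern, not the role's actual task file; the enable and start steps are combined here, and the become/environment handling is simplified.

    - name: Reload the podman_basic_user systemd user manager
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001

    - name: Enable and start the podman-kube@ unit for httpd1.yml
      ansible.builtin.systemd:
        # escaped unit name exactly as it appears in the log above
        name: 'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'
        scope: user
        enabled: true
        state: started
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001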
Jan 07 09:12:31 managed-node2 sudo[24895]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:12:32 managed-node2 python3.12[25117]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 07 09:12:33 managed-node2 python3.12[25249]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:34 managed-node2 python3.12[25382]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:36 managed-node2 python3.12[25514]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:36 managed-node2 python3.12[25645]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:38 managed-node2 podman[25806]: 2025-01-07 09:12:38.971262362 -0500 EST m=+1.698124942 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
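The systemd-escape call at the top of this block is how the per-file unit name for the root-scope httpd2 deployment is derived. As a command task it would look roughly like this; the register variable name is made up for illustration and is not taken from the role.

    - name: Get the escaped podman-kube@ unit name for httpd2.yml
      ansible.builtin.command:
        cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
      register: __httpd2_unit_name   # hypothetical variable name
      changed_when: false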
Jan 07 09:12:39 managed-node2 python3.12[25951]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:39 managed-node2 python3.12[26082]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:40 managed-node2 python3.12[26213]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:12:40 managed-node2 python3.12[26318]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259160.0679595-16278-120616227644317/.source.yml _original_basename=.tnnbkt8f follow=False checksum=a3cc229a74f6c618c5c624f29b34a65abdf49afb backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:12:41 managed-node2 systemd[1]: Created slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice - cgroup machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice. ░░ Subject: A start job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished successfully. ░░ ░░ The job identifier is 2155. 
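The copy and podman_play invocations logged just above map onto tasks of roughly this shape. The parameter values are taken from the logged module arguments; the task names and the copy source are placeholders.

    - name: Install the httpd2 kube file
      ansible.builtin.copy:
        src: httpd2.yml               # placeholder; the test generates this file
        dest: /etc/containers/ansible-kubernetes.d/httpd2.yml
        owner: root
        group: "0"
        mode: "0644"

    - name: Play the kube file as root
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        executable: podman
        debug: true
        log_level: debug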
Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.252793882 -0500 EST m=+0.070623561 container create ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.257483181 -0500 EST m=+0.075312775 pod create 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.283955008 -0500 EST m=+0.101784609 container create c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.260210262 -0500 EST m=+0.078039902 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:41 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:41 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3096] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:41 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3132] device (podman1): carrier: link connected Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3156] device (veth0): carrier: link connected Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3159] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 07 09:12:41 managed-node2 (udev-worker)[26472]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:41 managed-node2 (udev-worker)[26471]: Network interface NamePolicy= disabled on kernel command line. 
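The kernel and NetworkManager entries above correspond to netavark creating the podman1 bridge and a veth pair for the pod's network namespace. To look at the same objects on the host outside of the test, something like the following could be used (the network and bridge names are taken from this log):

    # Inspect the bridge network the kube pod is attached to.
    podman network inspect podman-default-kube-network
    # Show the bridge interface NetworkManager reported above.
    ip link show podman1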
Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3747] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3752] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3757] device (podman1): Activation: starting connection 'podman1' (34db12e8-8c9b-44ab-93ea-b15259092d20) Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3772] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3774] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3776] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.3778] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 systemd[1]: Started run-rf869b629f5524c4e8aba6919e24741dc.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rf869b629f5524c4e8aba6919e24741dc.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rf869b629f5524c4e8aba6919e24741dc.scope has finished successfully. ░░ ░░ The job identifier is 2241. Jan 07 09:12:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2162. Jan 07 09:12:41 managed-node2 aardvark-dns[26496]: starting aardvark on a child with pid 26498 Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Successfully parsed config Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Listen v6 ip {} Jan 07 09:12:41 managed-node2 aardvark-dns[26498]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jan 07 09:12:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2162. Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4427] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4444] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:12:41 managed-node2 NetworkManager[780]: [1736259161.4449] device (podman1): Activation: successful, device activated. 
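aardvark-dns is launched in a transient systemd scope to serve DNS for the pod network; the spawn command is printed by netavark later in this log. A sketch of the same invocation by hand, with the arguments as shown there:

    # Transient scope that netavark uses to start the per-network DNS server.
    systemd-run -q --scope /usr/libexec/podman/aardvark-dns \
        --config /run/containers/networks/aardvark-dns -p 53 run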
Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope. ░░ Subject: A start job for unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully. ░░ ░░ The job identifier is 2247. Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : terminal_ctrl_fd: 13 Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : winsz read side: 17, winsz write side: 18 Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope - libcrun container. ░░ Subject: A start job for unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has finished successfully. ░░ ░░ The job identifier is 2254. Jan 07 09:12:41 managed-node2 conmon[26512]: conmon ae0a056053dd5ad5a2ef : container PID: 26514 Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.507460159 -0500 EST m=+0.325289761 container init ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.511387783 -0500 EST m=+0.329217447 container start ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope. ░░ Subject: A start job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully. ░░ ░░ The job identifier is 2261. Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : terminal_ctrl_fd: 12 Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : winsz read side: 16, winsz write side: 17 Jan 07 09:12:41 managed-node2 systemd[1]: Started libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope - libcrun container. ░░ Subject: A start job for unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished successfully. 
░░ ░░ The job identifier is 2268. Jan 07 09:12:41 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : container PID: 26519 Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.574978664 -0500 EST m=+0.392808281 container init c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.577467831 -0500 EST m=+0.395297502 container start c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 07 09:12:41 managed-node2 podman[26456]: 2025-01-07 09:12:41.582818566 -0500 EST m=+0.400648090 pod start 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Container: c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-07T09:12:41-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-01-07T09:12:41-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-07T09:12:41-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-07T09:12:41-05:00" level=info msg="Using sqlite as database backend" time="2025-01-07T09:12:41-05:00" level=debug msg="Using graph driver overlay" time="2025-01-07T09:12:41-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Using run root /run/containers/storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-01-07T09:12:41-05:00" level=debug msg="Using tmp dir /run/libpod" time="2025-01-07T09:12:41-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-01-07T09:12:41-05:00" level=debug msg="Using transient store: false" time="2025-01-07T09:12:41-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-01-07T09:12:41-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-01-07T09:12:41-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-01-07T09:12:41-05:00" level=debug msg="Initializing event backend journald" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-07T09:12:41-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-07T09:12:41-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-07T09:12:41-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fe17f9641389980d9286eb85f735b0683746d93051a2476303460ddb3b1d2db9 bridge podman1 2025-01-07 09:10:18.933530103 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-07T09:12:41-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927" time="2025-01-07T09:12:41-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:41-05:00" level=debug msg="setting container name 8f0fc920afcf-infra" time="2025-01-07T09:12:41-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Allocated lock 1 for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:df073f11d00fb4b98ef78dc488ede5a74765d3c444d6f30ab4429fc493278927\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" has work directory \"/var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" has run directory \"/run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-07T09:12:41-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-07T09:12:41-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-07T09:12:41-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-07T09:12:41-05:00" level=debug msg="using systemd mode: false" time="2025-01-07T09:12:41-05:00" level=debug msg="adding container to pod httpd2" time="2025-01-07T09:12:41-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-01-07T09:12:41-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-07T09:12:41-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /proc" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /sys" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-07T09:12:41-05:00" level=debug msg="Allocated lock 2 for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" has work directory \"/var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" has run directory \"/run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Strongconnecting node ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="Pushed ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea onto stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Finishing node ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea. Popped ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea off stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Strongconnecting node c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="Pushed c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 onto stack" time="2025-01-07T09:12:41-05:00" level=debug msg="Finishing node c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3. 
Popped c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 off stack" time="2025-01-07T09:12:41-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/WKGQ2CQQTFXYEKRDTAEHJQTPHF,upperdir=/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/diff,workdir=/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c720,c783\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Mounted container \"ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea\" at \"/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Made network namespace at /run/netns/netns-ca2332a8-ba7a-e980-636a-50c016800d1d for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="Created root filesystem for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea at /var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_fe17f964_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "3e:a5:aa:c9:ad:84", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-07T09:12:41-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:41-05:00" level=debug msg="Setting Cgroups for container 
ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea to machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice:libpod:ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:41-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created OCI spec for container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea at /var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/config.json" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:41-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea -u ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata -p /run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/pidfile -n 8f0fc920afcf-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea]" time="2025-01-07T09:12:41-05:00" level=info msg="Running conmon under slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice and unitName libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope" time="2025-01-07T09:12:41-05:00" level=debug msg="Received: 26514" time="2025-01-07T09:12:41-05:00" level=info msg="Got Conmon PID as 26512" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea in OCI runtime" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-07T09:12:41-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-07T09:12:41-05:00" level=debug msg="Starting container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea with command [/catatonit -P]" time="2025-01-07T09:12:41-05:00" level=debug msg="Started container ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea" time="2025-01-07T09:12:41-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/CXSX6DXW3XYBB4KCHC4NKHQO5P,upperdir=/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/diff,workdir=/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c720,c783\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Mounted container \"c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3\" at \"/var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/merged\"" time="2025-01-07T09:12:41-05:00" level=debug msg="Created root filesystem for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 at /var/lib/containers/storage/overlay/06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3/merged" time="2025-01-07T09:12:41-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-07T09:12:41-05:00" level=debug msg="Setting Cgroups for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 to machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice:libpod:c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-07T09:12:41-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-07T09:12:41-05:00" level=debug msg="Created OCI spec for container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 at /var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/config.json" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice for parent machine.slice and name libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68" time="2025-01-07T09:12:41-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice" time="2025-01-07T09:12:41-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-07T09:12:41-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 -u c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata -p /run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3]" time="2025-01-07T09:12:41-05:00" level=info msg="Running conmon under slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice and unitName libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope" time="2025-01-07T09:12:41-05:00" level=debug msg="Received: 26519" time="2025-01-07T09:12:41-05:00" level=info msg="Got Conmon PID as 26517" time="2025-01-07T09:12:41-05:00" level=debug msg="Created container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 in OCI runtime" time="2025-01-07T09:12:41-05:00" level=debug msg="Starting container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-07T09:12:41-05:00" level=debug msg="Started container c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3" time="2025-01-07T09:12:41-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-07T09:12:41-05:00" level=debug msg="Shutting down engines" time="2025-01-07T09:12:41-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26456 Jan 07 09:12:41 managed-node2 python3.12[26449]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 07 09:12:42 managed-node2 python3.12[26651]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:12:42 managed-node2 systemd[1]: Reload requested from client PID 26652 ('systemctl') (unit session-6.scope)... Jan 07 09:12:42 managed-node2 systemd[1]: Reloading... Jan 07 09:12:42 managed-node2 systemd[1]: Reloading finished in 198 ms. Jan 07 09:12:43 managed-node2 python3.12[26838]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:12:43 managed-node2 systemd[1]: Reload requested from client PID 26841 ('systemctl') (unit session-6.scope)... Jan 07 09:12:43 managed-node2 systemd[1]: Reloading... Jan 07 09:12:43 managed-node2 systemd[1]: Reloading finished in 199 ms. Jan 07 09:12:43 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2275. Jan 07 09:12:43 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 07 09:12:43 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2275. Jan 07 09:12:44 managed-node2 python3.12[27031]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:12:44 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2356. Jan 07 09:12:44 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2353. 
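The ansible-systemd tasks above reload the daemon, then enable and start the templated unit for the kube file. The equivalent manual sequence, using the escaped unit name from this run:

    systemctl daemon-reload
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    systemctl start  'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'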
Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.111035063 -0500 EST m=+0.026022472 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:44 managed-node2 systemd[1]: libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.130046795 -0500 EST m=+0.045034296 container died ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, io.buildah.version=1.38.0) Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Received SIGHUP Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Successfully parsed config Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Listen v4 ip {} Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: Listen v6 ip {} Jan 07 09:12:44 managed-node2 aardvark-dns[26498]: No configuration found stopping the sever Jan 07 09:12:44 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:44 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 07 09:12:44 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 07 09:12:44 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:44 managed-node2 systemd[1]: run-rf869b629f5524c4e8aba6919e24741dc.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rf869b629f5524c4e8aba6919e24741dc.scope has successfully entered the 'dead' state. 
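In this run, the first thing the newly started podman-kube@ unit does is stop the pod that the earlier ad-hoc podman play kube created, which is why the entries above show a pod stop, the infra container dying, and aardvark-dns dropping its listener. To watch that handover on the host one could use, for example:

    # Pod and container state during the handover to the unit-managed pod.
    podman pod ps
    podman ps -a --pod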
Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea)" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using run root /run/containers/storage" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Initializing event backend journald" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured 
OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:44 managed-node2 NetworkManager[780]: [1736259164.1738] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 07 09:12:44 managed-node2 systemd[1]: run-netns-netns\x2dca2332a8\x2dba7a\x2de980\x2d636a\x2d50c016800d1d.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dca2332a8\x2dba7a\x2de980\x2d636a\x2d50c016800d1d.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay-281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-281ea9e93260ff02b0254fe28758f17f91023cd829478d2bd5882af4927740e2-merged.mount has successfully entered the 'dead' state. Jan 07 09:12:44 managed-node2 podman[27035]: 2025-01-07 09:12:44.25742555 -0500 EST m=+0.172412876 container cleanup ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea)" Jan 07 09:12:44 managed-node2 /usr/bin/podman[27046]: time="2025-01-07T09:12:44-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:44 managed-node2 systemd[1]: libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea.scope has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 podman[27035]: time="2025-01-07T09:12:54-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jan 07 09:12:54 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : container 26519 exited with status 137 Jan 07 09:12:54 managed-node2 systemd[1]: libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has successfully entered the 'dead' state. 
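The warning above shows the stop path for the httpd2-httpd2 container: SIGTERM is sent first, and after the 10-second stop timeout podman falls back to SIGKILL, which is why conmon then reports exit status 137 (128 + 9, i.e. killed by signal 9). The same behaviour can be reproduced manually with an explicit grace period, e.g.:

    # Stop the container with a 10-second grace period before SIGKILL.
    podman stop --time 10 httpd2-httpd2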
Jan 07 09:12:54 managed-node2 conmon[26517]: conmon c681c2767734a4dfc45b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice/libpod-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope/container/memory.events Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.153206906 -0500 EST m=+10.068194509 container died c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3)" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Using sqlite as database backend" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using graph driver overlay" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using run root /run/containers/storage" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using transient store: false" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Cached value indicated that 
native-diff is not being used" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Initializing event backend journald" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Setting parallel job count to 7" Jan 07 09:12:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay-06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-06c7300d6e50696d3353ec47fe77f8673402472a4e676d4268c9f47b0148eed3-merged.mount has successfully entered the 'dead' state. 
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.197316357 -0500 EST m=+10.112303685 container cleanup c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3)" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=debug msg="Shutting down engines" Jan 07 09:12:54 managed-node2 /usr/bin/podman[27068]: time="2025-01-07T09:12:54-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27068 Jan 07 09:12:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: Stopped libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope. ░░ Subject: A stop job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3.scope has finished. ░░ ░░ The job identifier is 2439 and the job result is done. Jan 07 09:12:54 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 07 09:12:54 managed-node2 systemd[1]: Removed slice machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice - cgroup machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice. ░░ Subject: A stop job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice has finished. ░░ ░░ The job identifier is 2438 and the job result is done. 
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.239866271 -0500 EST m=+10.154853663 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.248256619 -0500 EST m=+10.163244142 pod stop 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.277677895 -0500 EST m=+10.192665225 container remove c681c2767734a4dfc45b77adb3f727c394f2eb04b4e1f9b759299b366d1e80c3 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.300987047 -0500 EST m=+10.215974378 container remove ae0a056053dd5ad5a2effac7e82699f32fe04138febd524d2465eda1dd4568ea (image=localhost/podman-pause:5.3.1-1733097600, name=8f0fc920afcf-infra, pod_id=8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68, io.buildah.version=1.38.0) Jan 07 09:12:54 managed-node2 systemd[1]: machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68.slice: No such file or directory Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.309692431 -0500 EST m=+10.224679763 pod remove 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: Pods stopped: Jan 07 09:12:54 managed-node2 podman[27035]: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Jan 07 09:12:54 managed-node2 podman[27035]: Pods removed: Jan 07 09:12:54 managed-node2 podman[27035]: 8f0fc920afcf00e279a9b03adf320d9dc3479f7fd5a25611aee0e7dfa8dddc68 Jan 07 09:12:54 managed-node2 podman[27035]: Secrets removed: Jan 07 09:12:54 managed-node2 podman[27035]: Volumes removed: Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.330810793 -0500 EST m=+10.245798192 container create 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 systemd[1]: Created slice machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice - cgroup machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice. 
░░ Subject: A start job for unit machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0.slice has finished successfully. ░░ ░░ The job identifier is 2440. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.379485088 -0500 EST m=+10.294472531 container create b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.38382167 -0500 EST m=+10.298809084 pod create 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.41146549 -0500 EST m=+10.326453040 container create 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.411846857 -0500 EST m=+10.326834192 container restart 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope - libcrun container. ░░ Subject: A start job for unit libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962.scope has finished successfully. ░░ ░░ The job identifier is 2446. 
Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.385865098 -0500 EST m=+10.300852557 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.477339275 -0500 EST m=+10.392326692 container init 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.479789417 -0500 EST m=+10.394776795 container start 7b54a3800ffcaa03646d30c37320971be59ba4f6b989157b4e4ebf6595963962 (image=localhost/podman-pause:5.3.1-1733097600, name=5d7db1ec4fb5-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.4908] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 07 09:12:54 managed-node2 kernel: veth0: entered allmulticast mode Jan 07 09:12:54 managed-node2 kernel: veth0: entered promiscuous mode Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 07 09:12:54 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5064] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5077] device (veth0): carrier: link connected Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5082] device (podman1): carrier: link connected Jan 07 09:12:54 managed-node2 (udev-worker)[27089]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:54 managed-node2 (udev-worker)[27088]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5468] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5477] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5502] device (podman1): Activation: starting connection 'podman1' (4124c740-ce81-487f-85a8-9f75de9cacda) Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5507] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5511] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5513] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.5517] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2453. Jan 07 09:12:54 managed-node2 systemd[1]: Started run-r22f39ad3274e4348b99f7c15feb5c41c.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r22f39ad3274e4348b99f7c15feb5c41c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r22f39ad3274e4348b99f7c15feb5c41c.scope has finished successfully. ░░ ░░ The job identifier is 2532. Jan 07 09:12:54 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2453. Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6074] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6077] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:12:54 managed-node2 NetworkManager[780]: [1736259174.6084] device (podman1): Activation: successful, device activated. Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope - libcrun container. ░░ Subject: A start job for unit libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2.scope has finished successfully. ░░ ░░ The job identifier is 2538. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.768555677 -0500 EST m=+10.683543065 container init b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.771841771 -0500 EST m=+10.686829111 container start b8b3f35398f110339f901ee0df97db9680f63fa6b304d3b28412fe1a3ecb7db2 (image=localhost/podman-pause:5.3.1-1733097600, name=5c56b3244e88-infra, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 07 09:12:54 managed-node2 systemd[1]: Started libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope - libcrun container. ░░ Subject: A start job for unit libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161.scope has finished successfully. 
░░ ░░ The job identifier is 2545. Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.811050874 -0500 EST m=+10.726038235 container init 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.813606683 -0500 EST m=+10.728594182 container start 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:12:54 managed-node2 podman[27035]: 2025-01-07 09:12:54.818224788 -0500 EST m=+10.733212116 pod start 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 (image=, name=httpd2) Jan 07 09:12:54 managed-node2 podman[27035]: Pod: Jan 07 09:12:54 managed-node2 podman[27035]: 5c56b3244e885d4446e136c2928e76f5420753b7acba55842894d035d2fa49a0 Jan 07 09:12:54 managed-node2 podman[27035]: Container: Jan 07 09:12:54 managed-node2 podman[27035]: 153c7d559088904444cbd10e5bba9a4723631858835088bca091378f6b60c161 Jan 07 09:12:54 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2353. 
Jan 07 09:12:56 managed-node2 python3.12[27269]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:12:57 managed-node2 python3.12[27402]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:12:58 managed-node2 python3.12[27534]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:12:58 managed-node2 python3.12[27665]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:03 managed-node2 podman[27827]: 2025-01-07 09:13:03.415467564 -0500 EST m=+4.214936850 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:03 managed-node2 python3.12[27972]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:04 managed-node2 python3.12[28103]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:04 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jan 07 09:13:04 managed-node2 python3.12[28234]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:13:05 managed-node2 python3.12[28340]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259184.4436557-17141-206954730606473/.source.yml _original_basename=.h5y_v4b2 follow=False checksum=3f9fb5fb859fc9596ef344d6e422ee2f64209bc4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:05 managed-node2 python3.12[28471]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 07 09:13:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice - cgroup machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice. ░░ Subject: A start job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished successfully. ░░ ░░ The job identifier is 2552. 
Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.662283243 -0500 EST m=+0.059054626 container create 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.667015743 -0500 EST m=+0.063787068 pod create ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.6927368 -0500 EST m=+0.089508198 container create c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:05 managed-node2 kernel: veth1: entered allmulticast mode Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.669222958 -0500 EST m=+0.065994400 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:05 managed-node2 kernel: veth1: entered promiscuous mode Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:05 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 07 09:13:05 managed-node2 NetworkManager[780]: [1736259185.7231] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jan 07 09:13:05 managed-node2 NetworkManager[780]: [1736259185.7291] device (veth1): carrier: link connected Jan 07 09:13:05 managed-node2 (udev-worker)[28494]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope. ░░ Subject: A start job for unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully. ░░ ░░ The job identifier is 2559. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope - libcrun container. ░░ Subject: A start job for unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has finished successfully. ░░ ░░ The job identifier is 2566. 
Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.828405419 -0500 EST m=+0.225177109 container init 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.832160032 -0500 EST m=+0.228931455 container start 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope. ░░ Subject: A start job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully. ░░ ░░ The job identifier is 2573. Jan 07 09:13:05 managed-node2 systemd[1]: Started libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope - libcrun container. ░░ Subject: A start job for unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished successfully. ░░ ░░ The job identifier is 2580. Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.893323556 -0500 EST m=+0.290094921 container init c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.89629971 -0500 EST m=+0.293071039 container start c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:05 managed-node2 podman[28478]: 2025-01-07 09:13:05.900970965 -0500 EST m=+0.297742301 pod start ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:06 managed-node2 python3.12[28658]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:13:06 managed-node2 systemd[1]: Reload requested from client PID 28659 ('systemctl') (unit session-6.scope)... Jan 07 09:13:06 managed-node2 systemd[1]: Reloading... Jan 07 09:13:06 managed-node2 systemd[1]: Reloading finished in 216 ms. Jan 07 09:13:06 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... 
░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 2587. Jan 07 09:13:07 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Jan 07 09:13:07 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 2587. Jan 07 09:13:07 managed-node2 python3.12[28846]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 07 09:13:07 managed-node2 systemd[1]: Reload requested from client PID 28849 ('systemctl') (unit session-6.scope)... Jan 07 09:13:07 managed-node2 systemd[1]: Reloading... Jan 07 09:13:07 managed-node2 systemd[1]: Reloading finished in 216 ms. Jan 07 09:13:08 managed-node2 python3.12[29034]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:13:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2665. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.647867233 -0500 EST m=+0.025545097 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:08 managed-node2 systemd[1]: libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.671925638 -0500 EST m=+0.049603547 container died 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, io.buildah.version=1.38.0) Jan 07 09:13:08 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:08 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jan 07 09:13:08 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jan 07 09:13:08 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:08 managed-node2 systemd[1]: run-netns-netns\x2d61fd3e74\x2d76fb\x2d437e\x2d898c\x2d2a6c653e1740.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d61fd3e74\x2d76fb\x2d437e\x2d898c\x2d2a6c653e1740.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2c7ef2a08c555fa94ed7dc34cb218fd5c019af0ad8db754ec30393e70bbc4a7c-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-2c7ef2a08c555fa94ed7dc34cb218fd5c019af0ad8db754ec30393e70bbc4a7c-merged.mount has successfully entered the 'dead' state. Jan 07 09:13:08 managed-node2 podman[29038]: 2025-01-07 09:13:08.746972652 -0500 EST m=+0.124650421 container cleanup 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:08 managed-node2 systemd[1]: libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 podman[29038]: time="2025-01-07T09:13:18-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jan 07 09:13:18 managed-node2 systemd[1]: libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.682592217 -0500 EST m=+10.060270133 container died c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-cedec7afe0a0745406bf75e45e9c627a4195378d5bd84ff9f8497d9a1a641b40-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-cedec7afe0a0745406bf75e45e9c627a4195378d5bd84ff9f8497d9a1a641b40-merged.mount has successfully entered the 'dead' state. 
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.714723753 -0500 EST m=+10.092401491 container cleanup c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:18 managed-node2 systemd[1]: Stopping libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope... ░░ Subject: A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has begun execution. ░░ ░░ The job identifier is 2751. Jan 07 09:13:18 managed-node2 systemd[1]: libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has successfully entered the 'dead' state. Jan 07 09:13:18 managed-node2 systemd[1]: Stopped libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope. ░░ Subject: A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff.scope has finished. ░░ ░░ The job identifier is 2751 and the job result is done. Jan 07 09:13:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice - cgroup machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice has finished. ░░ ░░ The job identifier is 2750 and the job result is done. 
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.727059126 -0500 EST m=+10.104736892 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.731012349 -0500 EST m=+10.108690120 pod stop ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.752360105 -0500 EST m=+10.130037878 container remove c3289e9173085a5b5a1a30bb43f9c80d177bc2b36a900afe4944f56de35a95ff (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.777234648 -0500 EST m=+10.154912428 container remove 4e013f3b0cbbb21554ae27e004216ebf4bb63a7077f5fdffa4273b6aa7fadb80 (image=localhost/podman-pause:5.3.1-1733097600, name=ace8ef753c1d-infra, pod_id=ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef, io.buildah.version=1.38.0) Jan 07 09:13:18 managed-node2 systemd[1]: machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef.slice: No such file or directory Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.784821843 -0500 EST m=+10.162499586 pod remove ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef (image=, name=httpd3) Jan 07 09:13:18 managed-node2 podman[29038]: Pods stopped: Jan 07 09:13:18 managed-node2 podman[29038]: ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef Jan 07 09:13:18 managed-node2 podman[29038]: Pods removed: Jan 07 09:13:18 managed-node2 podman[29038]: ace8ef753c1dcf7138d1b0f4a0b2b78ed9e365871782b439684c6583797800ef Jan 07 09:13:18 managed-node2 podman[29038]: Secrets removed: Jan 07 09:13:18 managed-node2 podman[29038]: Volumes removed: Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.805183764 -0500 EST m=+10.182861530 container create c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice - cgroup machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice. 
░░ Subject: A start job for unit machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee.slice has finished successfully. ░░ ░░ The job identifier is 2752. Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.8434908 -0500 EST m=+10.221168683 container create 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.848043542 -0500 EST m=+10.225721306 pod create 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee (image=, name=httpd3) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.875470949 -0500 EST m=+10.253148743 container create 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.875833848 -0500 EST m=+10.253511627 container restart c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.850196796 -0500 EST m=+10.227874699 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 07 09:13:18 managed-node2 systemd[1]: Started libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope - libcrun container. ░░ Subject: A start job for unit libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31.scope has finished successfully. ░░ ░░ The job identifier is 2758. 
Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.9282523 -0500 EST m=+10.305930108 container init c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 podman[29038]: 2025-01-07 09:13:18.930560751 -0500 EST m=+10.308238599 container start c8ea84d4fdcdc7f82778fa4c948f045584db3f8eb1d1f88bb9368e9dd0f27b31 (image=localhost/podman-pause:5.3.1-1733097600, name=aeb421c16034-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 07 09:13:18 managed-node2 kernel: veth1: entered allmulticast mode Jan 07 09:13:18 managed-node2 kernel: veth1: entered promiscuous mode Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 07 09:13:18 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 07 09:13:18 managed-node2 NetworkManager[780]: [1736259198.9608] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 07 09:13:18 managed-node2 NetworkManager[780]: [1736259198.9622] device (veth1): carrier: link connected Jan 07 09:13:18 managed-node2 (udev-worker)[29083]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:13:19 managed-node2 systemd[1]: Started libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope - libcrun container. ░░ Subject: A start job for unit libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d.scope has finished successfully. ░░ ░░ The job identifier is 2765. Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.047971575 -0500 EST m=+10.425649470 container init 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.050915982 -0500 EST m=+10.428594064 container start 0479d7891e3035920e5b465837b256f30fdc1e0fadf587d7dc25f03d4020988d (image=localhost/podman-pause:5.3.1-1733097600, name=3457d4d54eec-infra, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 07 09:13:19 managed-node2 systemd[1]: Started libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope - libcrun container. ░░ Subject: A start job for unit libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b.scope has finished successfully. ░░ ░░ The job identifier is 2772. 
Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.097276903 -0500 EST m=+10.474954798 container init 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.099639667 -0500 EST m=+10.477317549 container start 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 07 09:13:19 managed-node2 podman[29038]: 2025-01-07 09:13:19.103452832 -0500 EST m=+10.481130602 pod start 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee (image=, name=httpd3) Jan 07 09:13:19 managed-node2 podman[29038]: Pod: Jan 07 09:13:19 managed-node2 podman[29038]: 3457d4d54eeca4fb47b37d6f37ab7caf65c7a6ab614c5bab1a90ac21f23bb7ee Jan 07 09:13:19 managed-node2 podman[29038]: Container: Jan 07 09:13:19 managed-node2 podman[29038]: 4bd23d3a55ea101d4f9a990a1aac44af242e338ceb6e025bbcc47b967d7d1b6b Jan 07 09:13:19 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2665. Jan 07 09:13:19 managed-node2 sudo[29289]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pnmkbnzuldsvzobphkxtjasvboghknnr ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259199.4684494-17727-168682880762240/AnsiballZ_command.py' Jan 07 09:13:19 managed-node2 sudo[29289]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29289) opened. Jan 07 09:13:19 managed-node2 sudo[29289]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:13:19 managed-node2 python3.12[29292]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:19 managed-node2 systemd[23047]: Started podman-29300.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
Jan 07 09:13:19 managed-node2 sudo[29289]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:13:20 managed-node2 python3.12[29439]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:20 managed-node2 python3.12[29578]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:20 managed-node2 sudo[29759]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vsldhxkafmrvalmibizzajqfspywsjxm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736259200.8099418-17783-63529375764264/AnsiballZ_command.py' Jan 07 09:13:21 managed-node2 sudo[29759]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29759) opened. Jan 07 09:13:21 managed-node2 sudo[29759]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 07 09:13:21 managed-node2 python3.12[29762]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:21 managed-node2 sudo[29759]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 07 09:13:21 managed-node2 python3.12[29896]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:21 managed-node2 python3.12[30030]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:22 managed-node2 python3.12[30164]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:23 managed-node2 python3.12[30297]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget 
use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:23 managed-node2 python3.12[30428]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:24 managed-node2 python3.12[30560]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:24 managed-node2 python3.12[30691]: ansible-file Invoked with path=/tmp/lsr_kom0gs4j_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:27 managed-node2 python3.12[30865]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 07 09:13:29 managed-node2 python3.12[31038]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:30 managed-node2 python3.12[31169]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:32 managed-node2 python3.12[31305]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[619]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. 
Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 dbus-broker-launch[23523]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 07 09:13:34 managed-node2 systemd[1]: Reload requested from client PID 31313 ('systemctl') (unit session-6.scope)... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 07 09:13:34 managed-node2 systemd[1]: Started run-r5293c8ab6057453592d98f0dcc4cfa58.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has finished successfully. ░░ ░░ The job identifier is 2783. Jan 07 09:13:34 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2861. Jan 07 09:13:34 managed-node2 systemd[1]: Reload requested from client PID 31378 ('systemctl') (unit session-6.scope)... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading... Jan 07 09:13:34 managed-node2 systemd[1]: Reloading finished in 363 ms. Jan 07 09:13:35 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 07 09:13:35 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 07 09:13:35 managed-node2 systemd[1]: Finished man-db-cache-update.service. 
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2861. Jan 07 09:13:35 managed-node2 systemd[1]: run-r5293c8ab6057453592d98f0dcc4cfa58.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r5293c8ab6057453592d98f0dcc4cfa58.service has successfully entered the 'dead' state. Jan 07 09:13:36 managed-node2 python3.12[31571]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:36 managed-node2 python3.12[31702]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:37 managed-node2 python3.12[31833]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:13:37 managed-node2 systemd[1]: Reload requested from client PID 31836 ('systemctl') (unit session-6.scope)... Jan 07 09:13:37 managed-node2 systemd[1]: Reloading... Jan 07 09:13:37 managed-node2 systemd[1]: Reloading finished in 214 ms. Jan 07 09:13:37 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2939. Jan 07 09:13:37 managed-node2 (rtmonger)[31893]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 07 09:13:37 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2939. 
Jan 07 09:13:38 managed-node2 python3.12[32051]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 rsyslogd[656]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:38 managed-node2 certmonger[32067]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
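The certificate role is driving certmonger here: the certificate_request call logged above (name=quadlet_demo, dns=['localhost'], ca=self-sign, directory=/etc/pki/tls) yields the self-signed pair that certmonger reports as issued and saved, and the next entries show the test reading both files and then untracking and deleting them, presumably because only the PEM contents are needed later. A sketch of inspecting and releasing such a certificate by hand, assuming the paths logged here:

    # Inspect the issued pair (paths as logged by certmonger and the slurp tasks)
    openssl x509 -in /etc/pki/tls/certs/quadlet_demo.crt -noout -subject -issuer -enddate
    openssl pkey -in /etc/pki/tls/private/quadlet_demo.key -noout

    # Stop certmonger from renewing it, then remove the files (mirrors the tasks below)
    getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt
    rm -f /etc/pki/tls/certs/quadlet_demo.crt /etc/pki/tls/private/quadlet_demo.key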
Jan 07 09:13:38 managed-node2 certmonger[31893]: 2025-01-07 09:13:38 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:39 managed-node2 python3.12[32198]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 07 09:13:39 managed-node2 python3.12[32329]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 07 09:13:39 managed-node2 python3.12[32460]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 07 09:13:40 managed-node2 python3.12[32591]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:40 managed-node2 certmonger[31893]: 2025-01-07 09:13:40 [31893] Wrote to /var/lib/certmonger/requests/20250107141338 Jan 07 09:13:40 managed-node2 python3.12[32723]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:41 managed-node2 python3.12[32854]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:41 managed-node2 python3.12[32985]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:42 managed-node2 python3.12[33116]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:42 managed-node2 python3.12[33247]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:44 managed-node2 python3.12[33509]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:13:45 managed-node2 python3.12[33646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 07 09:13:45 managed-node2 python3.12[33778]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:47 managed-node2 python3.12[33911]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:48 managed-node2 python3.12[34042]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:48 managed-node2 python3.12[34173]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:13:49 managed-node2 python3.12[34305]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:13:50 managed-node2 python3.12[34438]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:13:50 managed-node2 python3.12[34571]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:13:51 managed-node2 python3.12[34702]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:13:57 managed-node2 python3.12[35313]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:13:59 managed-node2 python3.12[35446]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:13:59 managed-node2 python3.12[35577]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:00 managed-node2 python3.12[35682]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259239.349858-19473-42692965240365/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:00 managed-node2 python3.12[35813]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:00 managed-node2 systemd[1]: Reload requested from client PID 35814 ('systemctl') (unit session-6.scope)... Jan 07 09:14:00 managed-node2 systemd[1]: Reloading... Jan 07 09:14:00 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 07 09:14:01 managed-node2 python3.12[36001]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:01 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 3018. Jan 07 09:14:01 managed-node2 quadlet-demo-network[36005]: systemd-quadlet-demo Jan 07 09:14:01 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 3018. Jan 07 09:14:02 managed-node2 python3.12[36143]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:04 managed-node2 python3.12[36276]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:05 managed-node2 python3.12[36407]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:05 managed-node2 python3.12[36512]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259244.858735-19722-68882637510749/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:06 managed-node2 python3.12[36643]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:06 managed-node2 systemd[1]: Reload requested from client PID 36644 ('systemctl') (unit session-6.scope)... Jan 07 09:14:06 managed-node2 systemd[1]: Reloading... 
Jan 07 09:14:06 managed-node2 systemd[1]: Reloading finished in 218 ms. Jan 07 09:14:06 managed-node2 python3.12[36830]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:06 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 3102. Jan 07 09:14:07 managed-node2 podman[36834]: 2025-01-07 09:14:07.042513615 -0500 EST m=+0.026104344 volume create systemd-quadlet-demo-mysql Jan 07 09:14:07 managed-node2 quadlet-demo-mysql-volume[36834]: systemd-quadlet-demo-mysql Jan 07 09:14:07 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 3102. Jan 07 09:14:08 managed-node2 python3.12[36973]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:09 managed-node2 python3.12[37106]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:18 managed-node2 podman[37245]: 2025-01-07 09:14:18.091301948 -0500 EST m=+7.582714957 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 07 09:14:18 managed-node2 python3.12[37555]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:19 managed-node2 python3.12[37686]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:19 managed-node2 python3.12[37791]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259258.759416-20427-10094735080069/.source.container _original_basename=.fjkuewtn follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:19 managed-node2 python3.12[37922]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None 
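quadlet-demo.network and quadlet-demo-mysql.volume, copied into /etc/containers/systemd above, are quadlet unit files; after the daemon reload, the generated quadlet-demo-network.service and quadlet-demo-mysql-volume.service create the podman objects whose names they print (systemd-quadlet-demo and systemd-quadlet-demo-mysql, i.e. the unit base name with quadlet's default "systemd-" prefix). Only the files' checksums appear in this log, so the following is a hypothetical sketch of such units plus a check that the objects exist:

    cat /etc/containers/systemd/quadlet-demo.network
    # Illustrative content only; the deployed file is not reproduced in this log:
    # [Network]
    # (subnet/gateway or other network options would go here)

    cat /etc/containers/systemd/quadlet-demo-mysql.volume
    # [Volume]
    # (volume options, if any)

    # Confirm the objects the generated services created (names as printed above)
    podman network inspect systemd-quadlet-demo
    podman volume inspect systemd-quadlet-demo-mysql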
Jan 07 09:14:20 managed-node2 systemd[1]: Reload requested from client PID 37923 ('systemctl') (unit session-6.scope)... Jan 07 09:14:20 managed-node2 systemd[1]: Reloading... Jan 07 09:14:20 managed-node2 systemd[1]: Reloading finished in 220 ms. Jan 07 09:14:20 managed-node2 python3.12[38109]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:14:20 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 3186. Jan 07 09:14:20 managed-node2 podman[38113]: 2025-01-07 09:14:20.979510446 -0500 EST m=+0.045287997 container create c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:20 managed-node2 systemd[23047]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0073] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 07 09:14:21 managed-node2 kernel: veth2: entered allmulticast mode Jan 07 09:14:21 managed-node2 kernel: veth2: entered promiscuous mode Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0173] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 07 09:14:21 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Jan 07 09:14:21 managed-node2 systemd[23047]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0235] device (veth2): carrier: link connected Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0244] device (podman2): carrier: link connected Jan 07 09:14:21 managed-node2 (udev-worker)[38130]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:14:21 managed-node2 (udev-worker)[38129]: Network interface NamePolicy= disabled on kernel command line. 
Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0614] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0625] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0631] device (podman2): Activation: starting connection 'podman2' (c24cf0c5-78fa-46c2-ae4b-64bb2b37c6d1) Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0633] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0635] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0636] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.0638] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:20.959664842 -0500 EST m=+0.025442508 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 07 09:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3273. Jan 07 09:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3273. Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1115] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1119] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 07 09:14:21 managed-node2 NetworkManager[780]: [1736259261.1124] device (podman2): Activation: successful, device activated. Jan 07 09:14:21 managed-node2 systemd[1]: Started c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer - /usr/bin/podman healthcheck run c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd. ░░ Subject: A start job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished successfully. ░░ ░░ The job identifier is 3352. 
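quadlet-demo-mysql.container, deployed a few entries up, is the quadlet unit behind quadlet-demo-mysql.service: it runs quay.io/linux-system-roles/mysql:5.6 as the container quadlet-demo-mysql, attaches it to the quadlet network (the podman2 bridge NetworkManager is bringing up here), and evidently defines a health check, which is why systemd starts the per-container timer above and why health_status=healthy entries keep appearing below. The unit's real options are not in this log; a hypothetical sketch and a manual health check:

    cat /etc/containers/systemd/quadlet-demo-mysql.container
    # Illustrative content only (image and container name are from the log, the rest is assumed):
    # [Container]
    # Image=quay.io/linux-system-roles/mysql:5.6
    # ContainerName=quadlet-demo-mysql
    # Network=quadlet-demo.network
    # Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    # HealthCmd=/usr/bin/true     # placeholder; the real command is not shown here
    #
    # [Install]
    # WantedBy=multi-user.target

    # Ask podman for the container's health directly
    podman healthcheck run quadlet-demo-mysql && echo healthy
    podman ps --filter name=quadlet-demo-mysql --format '{{.Names}} {{.Status}}'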
Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:21.192827482 -0500 EST m=+0.258605137 container init c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:21 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 3186. Jan 07 09:14:21 managed-node2 podman[38113]: 2025-01-07 09:14:21.223745652 -0500 EST m=+0.289523310 container start c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:21 managed-node2 quadlet-demo-mysql[38113]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd Jan 07 09:14:21 managed-node2 podman[38176]: 2025-01-07 09:14:21.717094811 -0500 EST m=+0.467717036 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:22 managed-node2 python3.12[38374]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:24 managed-node2 python3.12[38537]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:25 managed-node2 python3.12[38668]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:25 managed-node2 python3.12[38773]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259264.7863326-20711-60529197268232/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:26 managed-node2 python3.12[38904]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:26 managed-node2 systemd[1]: Reload requested from client PID 38905 ('systemctl') (unit session-6.scope)... Jan 07 09:14:26 managed-node2 systemd[1]: Reloading... Jan 07 09:14:26 managed-node2 systemd[1]: Reloading finished in 400 ms. Jan 07 09:14:26 managed-node2 systemd[1]: Starting dnf-makecache.service - dnf makecache... 
░░ Subject: A start job for unit dnf-makecache.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has begun execution. ░░ ░░ The job identifier is 3508. Jan 07 09:14:26 managed-node2 dnf[38990]: Beaker Client - RedHatEnterpriseLinux9 11 kB/s | 1.5 kB 00:00 Jan 07 09:14:26 managed-node2 dnf[38990]: Beaker harness 18 kB/s | 1.3 kB 00:00 Jan 07 09:14:26 managed-node2 dnf[38990]: Copr repo for beakerlib-libraries owned by bgon 49 kB/s | 1.8 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - BaseOS 53 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - AppStream 60 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: CentOS Stream 10 - HighAvailability 63 kB/s | 2.3 kB 00:00 Jan 07 09:14:27 managed-node2 dnf[38990]: Metadata cache created. Jan 07 09:14:27 managed-node2 systemd[1]: dnf-makecache.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dnf-makecache.service has successfully entered the 'dead' state. Jan 07 09:14:27 managed-node2 systemd[1]: Finished dnf-makecache.service - dnf makecache. ░░ Subject: A start job for unit dnf-makecache.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has finished successfully. ░░ ░░ The job identifier is 3508. Jan 07 09:14:27 managed-node2 python3.12[39137]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:29 managed-node2 python3.12[39277]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:30 managed-node2 python3.12[39408]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:14:30 managed-node2 python3.12[39513]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736259269.9375236-20890-15337147536376/.source.yml _original_basename=.xtqxu9q2 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 07 09:14:31 managed-node2 python3.12[39667]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:14:31 managed-node2 systemd[1]: Reload requested from client PID 39669 ('systemctl') (unit session-6.scope)... 
Jan 07 09:14:31 managed-node2 systemd[1]: Reloading... Jan 07 09:14:31 managed-node2 systemd[1]: Reloading finished in 225 ms. Jan 07 09:14:32 managed-node2 python3.12[39855]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:14:33 managed-node2 python3.12[39988]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 07 09:14:34 managed-node2 python3.12[40119]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:34 managed-node2 python3.12[40250]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:14:52 managed-node2 podman[40580]: 2025-01-07 09:14:52.442310371 -0500 EST m=+0.617926399 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:14:53 managed-node2 podman[40389]: 2025-01-07 09:14:53.747429097 -0500 EST m=+18.758486426 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 07 09:15:02 managed-node2 podman[40826]: 2025-01-07 09:15:02.55194195 -0500 EST m=+8.328656821 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 07 09:15:02 managed-node2 python3.12[41090]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:03 managed-node2 python3.12[41221]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 07 09:15:03 managed-node2 python3.12[41326]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736259303.1670082-21735-227828488195663/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:04 managed-node2 python3.12[41457]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None 
enabled=None force=None masked=None Jan 07 09:15:04 managed-node2 systemd[1]: Reload requested from client PID 41458 ('systemctl') (unit session-6.scope)... Jan 07 09:15:04 managed-node2 systemd[1]: Reloading... Jan 07 09:15:04 managed-node2 systemd[1]: Reloading finished in 223 ms. Jan 07 09:15:05 managed-node2 python3.12[41644]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 07 09:15:05 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3664. Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Pods stopped: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Pods removed: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Secrets removed: Jan 07 09:15:05 managed-node2 quadlet-demo[41648]: Volumes removed: Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.603840603 -0500 EST m=+0.032211395 volume create wp-pv-claim Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.731730351 -0500 EST m=+0.160101142 container create f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.742380428 -0500 EST m=+0.170751219 volume create envoy-proxy-config Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.748870558 -0500 EST m=+0.177241365 volume create envoy-certificates Jan 07 09:15:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice - cgroup machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice. ░░ Subject: A start job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished successfully. ░░ ░░ The job identifier is 3751. 
Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.798098532 -0500 EST m=+0.226469325 container create 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.804207227 -0500 EST m=+0.232578028 pod create 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.846647839 -0500 EST m=+0.275019004 container create b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.876274204 -0500 EST m=+0.304644999 container create 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.877124564 -0500 EST m=+0.305495454 container restart f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.813482805 -0500 EST m=+0.241853898 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.85064232 -0500 EST m=+0.279013516 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 07 09:15:05 managed-node2 systemd[1]: Started libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope - libcrun container. ░░ Subject: A start job for unit libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope has finished successfully. ░░ ░░ The job identifier is 3757. 
Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.932092861 -0500 EST m=+0.360463806 container init f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 podman[41648]: 2025-01-07 09:15:05.934747592 -0500 EST m=+0.363118557 container start f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 07 09:15:05 managed-node2 kernel: veth3: entered allmulticast mode Jan 07 09:15:05 managed-node2 kernel: veth3: entered promiscuous mode Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 07 09:15:05 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Jan 07 09:15:05 managed-node2 NetworkManager[780]: [1736259305.9637] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Jan 07 09:15:05 managed-node2 NetworkManager[780]: [1736259305.9679] device (veth3): carrier: link connected Jan 07 09:15:05 managed-node2 (udev-worker)[41669]: Network interface NamePolicy= disabled on kernel command line. Jan 07 09:15:06 managed-node2 systemd[1]: Started libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope - libcrun container. ░░ Subject: A start job for unit libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope has finished successfully. ░░ ░░ The job identifier is 3764. Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.056224391 -0500 EST m=+0.484595266 container init 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.06016321 -0500 EST m=+0.488534068 container start 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:15:06 managed-node2 systemd[1]: Started libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope - libcrun container. ░░ Subject: A start job for unit libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope has finished successfully. ░░ ░░ The job identifier is 3771. 
Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.117341144 -0500 EST m=+0.545712103 container init b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:06 managed-node2 podman[41648]: 2025-01-07 09:15:06.120032861 -0500 EST m=+0.548403708 container start b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 systemd[1]: Started libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope - libcrun container. ░░ Subject: A start job for unit libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope has finished successfully. ░░ ░░ The job identifier is 3778. Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.235011174 -0500 EST m=+1.663382056 container init 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.238016036 -0500 EST m=+1.666386966 container start 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:15:07 managed-node2 podman[41648]: 2025-01-07 09:15:07.278508651 -0500 EST m=+1.706879540 pod start 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Volumes: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: wp-pv-claim Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Pod: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: Containers: Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 Jan 07 09:15:07 managed-node2 quadlet-demo[41648]: 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b Jan 07 09:15:07 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3664. 
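The repeated ansible-get_url invocations that follow implement the test's web check against https://localhost:8000. A rough sketch of such a retried check is shown below; the url, dest, mode, validate_certs, and timeout values are taken from the log, while the retries/delay loop is an assumption based on the probes arriving a few seconds apart.

- name: Check the web frontend (sketch; retries/delay are assumed values)
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false
    timeout: 10
  register: __web_check   # illustrative variable name
  retries: 6              # assumption: the log shows several attempts ~5-6 s apart
  delay: 5                # assumption
  until: __web_check is succeeded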
Jan 07 09:15:08 managed-node2 python3.12[42036]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:08 managed-node2 python3.12[42168]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42307]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42445]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:09 managed-node2 python3.12[42584]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:10 managed-node2 python3.12[42718]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:15 managed-node2 python3.12[42849]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:21 managed-node2 python3.12[42980]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:22 managed-node2 podman[43002]: 2025-01-07 09:15:22.792072206 -0500 EST m=+0.094428755 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, 
PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:15:26 managed-node2 python3.12[43127]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:32 managed-node2 python3.12[43258]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:37 managed-node2 python3.12[43389]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:43 managed-node2 python3.12[43520]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:15:43 managed-node2 python3.12[43651]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:43 managed-node2 python3.12[43783]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:44 managed-node2 python3.12[43921]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:44 managed-node2 python3.12[44060]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:45 managed-node2 
python3.12[44194]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:47 managed-node2 python3.12[44457]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:15:48 managed-node2 python3.12[44594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:15:50 managed-node2 python3.12[44727]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 07 09:15:51 managed-node2 python3.12[44859]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 07 09:15:51 managed-node2 python3.12[44992]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 07 09:15:52 managed-node2 python3.12[45125]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:15:52 managed-node2 python3.12[45256]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 07 09:15:53 managed-node2 podman[45278]: 2025-01-07 09:15:53.784313454 -0500 EST m=+0.094295829 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:15:59 managed-node2 python3.12[45820]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:01 managed-node2 python3.12[45953]: ansible-systemd Invoked with name=quadlet-demo.service scope=system 
state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 07 09:16:01 managed-node2 systemd[1]: Reload requested from client PID 45956 ('systemctl') (unit session-6.scope)... Jan 07 09:16:01 managed-node2 systemd[1]: Reloading... Jan 07 09:16:01 managed-node2 systemd[1]: Reloading finished in 242 ms. Jan 07 09:16:02 managed-node2 systemd[1]: Stopping quadlet-demo.service... ░░ Subject: A stop job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3941. Jan 07 09:16:02 managed-node2 systemd[1]: libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2.scope has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 podman[46014]: 2025-01-07 09:16:02.060522231 -0500 EST m=+0.023672528 container died f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-09bce270c750f4aa7497e1b4639c9795436f7b4cd72de33e54af50ef01ff260f-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-09bce270c750f4aa7497e1b4639c9795436f7b4cd72de33e54af50ef01ff260f-merged.mount has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 podman[46014]: 2025-01-07 09:16:02.102194169 -0500 EST m=+0.065344783 container cleanup f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.151636463 -0500 EST m=+0.025748065 pod stop 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:16:02 managed-node2 systemd[1]: libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope has successfully entered the 'dead' state. 
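Cleanup starts by stopping and disabling the unit before its Quadlet file is removed. A minimal sketch of the equivalent task, using the parameters from the ansible-systemd call above (state=stopped, enabled=False, force=True):

- name: Stop and disable the Quadlet unit during cleanup (sketch)
  ansible.builtin.systemd:
    name: quadlet-demo.service
    scope: system
    state: stopped
    enabled: false
    force: true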
Jan 07 09:16:02 managed-node2 conmon[41825]: conmon 8975483c7a2ea776b03e : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice/libpod-8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b.scope/container/memory.events Jan 07 09:16:02 managed-node2 systemd[1]: libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718.scope has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.184510944 -0500 EST m=+0.058622704 container died 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.21392549 -0500 EST m=+0.088037137 container died 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 07 09:16:02 managed-node2 kernel: veth3 (unregistering): left allmulticast mode Jan 07 09:16:02 managed-node2 kernel: veth3 (unregistering): left promiscuous mode Jan 07 09:16:02 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 07 09:16:02 managed-node2 systemd[1]: run-netns-netns\x2da9e82f3f\x2d120d\x2daf03\x2d7bca\x2de6fc83d52210.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2da9e82f3f\x2d120d\x2daf03\x2d7bca\x2de6fc83d52210.mount has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-5a4b78b3bc6650c5e8e7638eb388ad7d84a108b6467977a108e726ebca20140e-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-5a4b78b3bc6650c5e8e7638eb388ad7d84a108b6467977a108e726ebca20140e-merged.mount has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.282784965 -0500 EST m=+0.156896570 container cleanup 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 systemd[1]: libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7.scope has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.309691148 -0500 EST m=+0.183802673 container cleanup 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.31128568 -0500 EST m=+0.185397309 container died b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.354538784 -0500 EST m=+0.228650261 container cleanup b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 systemd[1]: Removed slice machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice - cgroup machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice. ░░ Subject: A stop job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice has finished. ░░ ░░ The job identifier is 3943 and the job result is done. 
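The "Pods stopped / Pods removed / Secrets removed / Volumes removed" summary that quadlet-demo prints at start-up and again just below is consistent with podman's kube workflow, which Quadlet-generated .kube units typically wrap (podman kube play on start, the matching teardown on stop). If the unit were ever lost, a manual teardown could look like the sketch below; it assumes quadlet-demo.yml (the file handled later in the log) is the Kubernetes YAML the .kube unit points at.

- name: Tear down the kube-based workload by hand (illustrative fallback, not what the role does)
  ansible.builtin.command:
    cmd: podman kube down /etc/containers/systemd/quadlet-demo.yml
  changed_when: false
  failed_when: false   # observe only; the ExecStop of quadlet-demo.service normally handles this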
Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.36250807 -0500 EST m=+0.236619535 pod stop 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:16:02 managed-node2 systemd[1]: machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: Failed to open /run/systemd/transient/machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: No such file or directory Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.370187774 -0500 EST m=+0.244299301 pod stop 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:16:02 managed-node2 systemd[1]: machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: Failed to open /run/systemd/transient/machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: No such file or directory Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.393178089 -0500 EST m=+0.267289591 container remove b6284e900e3840b08a5e47ae4df256da3cf59d74d098dd33e44ea991d48167a7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.410877673 -0500 EST m=+0.284989151 container remove 8975483c7a2ea776b03e7bbde585079344a1e57af10c6dd6be988bd99669e58b (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.441896017 -0500 EST m=+0.316007493 container remove 7cdb1162de3dfa3f1d0818f9055f20069ee37517a64da55eea9912ec1f333718 (image=localhost/podman-pause:5.3.1-1733097600, name=7cd86f9cd249-infra, pod_id=7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 07 09:16:02 managed-node2 systemd[1]: machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: Failed to open /run/systemd/transient/machine-libpod_pod_7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb.slice: No such file or directory Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.450734601 -0500 EST m=+0.324846044 pod remove 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb (image=, name=quadlet-demo) Jan 07 09:16:02 managed-node2 podman[46023]: 2025-01-07 09:16:02.469334583 -0500 EST m=+0.343446058 container remove f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: Pods stopped: Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: Pods removed: Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: 7cd86f9cd24930c895fc6b2568c209e946e3427408f75dcae50e4de648fa77fb Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: Secrets removed: Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: Volumes removed: Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: time="2025-01-07T09:16:02-05:00" level=error msg="Checking whether service of 
container f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 can be stopped: no container with ID f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 found in database: no such container" Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: time="2025-01-07T09:16:02-05:00" level=error msg="Checking whether service of container f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 can be stopped: no container with ID f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 found in database: no such container" Jan 07 09:16:02 managed-node2 quadlet-demo[46023]: time="2025-01-07T09:16:02-05:00" level=error msg="Checking whether service of container f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 can be stopped: no container with ID f433509c99b16be8d0e6a17bbb246d7ee67124e6f7b76f5f0a151af506b6a8e2 found in database: no such container" Jan 07 09:16:02 managed-node2 systemd[1]: quadlet-demo.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has successfully entered the 'dead' state. Jan 07 09:16:02 managed-node2 systemd[1]: Stopped quadlet-demo.service. ░░ Subject: A stop job for unit quadlet-demo.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has finished. ░░ ░░ The job identifier is 3941 and the job result is done. Jan 07 09:16:02 managed-node2 python3.12[46199]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2ab388b93a138ec906f366a4b9df900719085e525c8480166b4c286c41378fe7-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-2ab388b93a138ec906f366a4b9df900719085e525c8480166b4c286c41378fe7-merged.mount has successfully entered the 'dead' state. Jan 07 09:16:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9c9dae0704b8fd1f8e09162d202a873a63403fe89dfc6917881fa181c2179cc6-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-9c9dae0704b8fd1f8e09162d202a873a63403fe89dfc6917881fa181c2179cc6-merged.mount has successfully entered the 'dead' state. Jan 07 09:16:04 managed-node2 python3.12[46463]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:16:04 managed-node2 python3.12[46594]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:16:04 managed-node2 systemd[1]: Reload requested from client PID 46595 ('systemctl') (unit session-6.scope)... Jan 07 09:16:04 managed-node2 systemd[1]: Reloading... Jan 07 09:16:05 managed-node2 systemd[1]: Reloading finished in 227 ms. 
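Removing a Quadlet-managed workload follows a consistent pattern in this log: stat the unit file, delete it from /etc/containers/systemd, then reload systemd so the generated unit disappears. A compact sketch of the last two steps for the .kube file handled here; the same pattern repeats later for quadlet-demo.yml, quadlet-demo-mysql.container, quadlet-demo-mysql.volume, and quadlet-demo.network.

- name: Remove the Quadlet .kube file (sketch of the logged ansible-file call)
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Reload systemd so the generated unit is dropped (sketch)
  ansible.builtin.systemd:
    scope: system
    daemon_reload: true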
Jan 07 09:16:05 managed-node2 python3.12[46781]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:06 managed-node2 podman[46782]: 2025-01-07 09:16:06.240074125 -0500 EST m=+0.592645619 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 07 09:16:06 managed-node2 podman[46782]: 2025-01-07 09:16:05.667330864 -0500 EST m=+0.019902314 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b Jan 07 09:16:06 managed-node2 podman[46782]: 2025-01-07 09:16:06.380465821 -0500 EST m=+0.733037224 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 07 09:16:06 managed-node2 podman[46782]: 2025-01-07 09:16:06.240086019 -0500 EST m=+0.592657386 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d Jan 07 09:16:07 managed-node2 python3.12[46920]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:07 managed-node2 python3.12[47058]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:08 managed-node2 python3.12[47197]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:08 managed-node2 python3.12[47336]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:10 managed-node2 python3.12[47751]: ansible-service_facts Invoked Jan 07 09:16:13 managed-node2 python3.12[47989]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:14 managed-node2 python3.12[48122]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:16 managed-node2 python3.12[48386]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:16:16 managed-node2 python3.12[48517]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:16:16 managed-node2 systemd[1]: Reload requested from client PID 48518 ('systemctl') (unit session-6.scope)... 
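Image cleanup is a plain podman image prune --all -f; the untag/remove events above are podman's record of that prune, and podman images -n afterwards confirms what is left. A sketch of the prune step as invoked in the log:

- name: Prune all unused images (sketch of the logged command)
  ansible.builtin.command:
    cmd: podman image prune --all -f
  register: __prune                      # illustrative variable name
  changed_when: __prune.stdout != ""     # heuristic: prune prints the IDs it removed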
Jan 07 09:16:16 managed-node2 systemd[1]: Reloading... Jan 07 09:16:17 managed-node2 systemd[1]: Reloading finished in 219 ms. Jan 07 09:16:17 managed-node2 podman[48705]: 2025-01-07 09:16:17.936904202 -0500 EST m=+0.025511048 volume remove envoy-proxy-config Jan 07 09:16:18 managed-node2 podman[48843]: 2025-01-07 09:16:18.383306749 -0500 EST m=+0.037610474 volume remove envoy-certificates Jan 07 09:16:18 managed-node2 podman[48982]: 2025-01-07 09:16:18.84637826 -0500 EST m=+0.057941819 volume remove wp-pv-claim Jan 07 09:16:19 managed-node2 python3.12[49120]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:20 managed-node2 python3.12[49258]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:20 managed-node2 python3.12[49396]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:21 managed-node2 python3.12[49534]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:21 managed-node2 python3.12[49672]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:23 managed-node2 python3.12[50086]: ansible-service_facts Invoked Jan 07 09:16:23 managed-node2 podman[50094]: 2025-01-07 09:16:23.918171617 -0500 EST m=+0.082490811 container health_status c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=1, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:16:23 managed-node2 systemd[1]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 07 09:16:23 managed-node2 systemd[1]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.service has entered the 'failed' state with result 'exit-code'. 
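The failed transient unit above is the per-container health check: podman arranges a transient systemd timer/service that periodically runs podman healthcheck run <container-id> (the timer is named explicitly when it is stopped later in the log), and here that ExecStart exited with status 125. The same check can be triggered by hand; a minimal sketch:

- name: Run the MySQL container health check once (illustrative; normally driven by the transient timer)
  ansible.builtin.command:
    cmd: podman healthcheck run quadlet-demo-mysql
  changed_when: false
  failed_when: false   # observe the result without failing the play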
Jan 07 09:16:27 managed-node2 python3.12[50333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:29 managed-node2 python3.12[50466]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:31 managed-node2 python3.12[50730]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:16:31 managed-node2 python3.12[50861]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:16:31 managed-node2 systemd[1]: Reload requested from client PID 50862 ('systemctl') (unit session-6.scope)... Jan 07 09:16:31 managed-node2 systemd[1]: Reloading... Jan 07 09:16:31 managed-node2 systemd[1]: Reloading finished in 227 ms. Jan 07 09:16:32 managed-node2 python3.12[51049]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:33 managed-node2 python3.12[51188]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:34 managed-node2 python3.12[51326]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:34 managed-node2 python3.12[51464]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:35 managed-node2 python3.12[51602]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:36 managed-node2 python3.12[52018]: ansible-service_facts Invoked Jan 07 09:16:40 managed-node2 python3.12[52256]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:42 managed-node2 python3.12[52389]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 07 09:16:42 managed-node2 systemd[1]: Reload requested from client PID 52392 ('systemctl') (unit session-6.scope)... Jan 07 09:16:42 managed-node2 systemd[1]: Reloading... Jan 07 09:16:42 managed-node2 systemd[1]: Reloading finished in 225 ms. 
Jan 07 09:16:42 managed-node2 systemd[1]: Stopping quadlet-demo-mysql.service... ░░ Subject: A stop job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 4022. Jan 07 09:16:44 managed-node2 podman[52450]: 2025-01-07 09:16:44.046008845 -0500 EST m=+1.080664683 container died c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:16:44 managed-node2 systemd[1]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has successfully entered the 'dead' state. Jan 07 09:16:44 managed-node2 systemd[1]: Stopped c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer - /usr/bin/podman healthcheck run c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd. ░░ Subject: A stop job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-7ac923f5855b5f9d.timer has finished. ░░ ░░ The job identifier is 4023 and the job result is done. Jan 07 09:16:44 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 07 09:16:44 managed-node2 kernel: veth2 (unregistering): left allmulticast mode Jan 07 09:16:44 managed-node2 kernel: veth2 (unregistering): left promiscuous mode Jan 07 09:16:44 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 07 09:16:44 managed-node2 NetworkManager[780]: [1736259404.1094] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 07 09:16:44 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 4026. Jan 07 09:16:44 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 4026. Jan 07 09:16:44 managed-node2 systemd[1]: run-netns-netns\x2d6c39df41\x2df379\x2ded47\x2df325\x2d7b83d7f2ce92.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d6c39df41\x2df379\x2ded47\x2df325\x2d7b83d7f2ce92.mount has successfully entered the 'dead' state. 
Jan 07 09:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd-userdata-shm.mount has successfully entered the 'dead' state. Jan 07 09:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e484b1cd2be48eef350baec6d3a0f0bb3ee55357e34fd535a80b9a07b93bb00e-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-e484b1cd2be48eef350baec6d3a0f0bb3ee55357e34fd535a80b9a07b93bb00e-merged.mount has successfully entered the 'dead' state. Jan 07 09:16:44 managed-node2 podman[52450]: 2025-01-07 09:16:44.221462232 -0500 EST m=+1.256117801 container remove c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 07 09:16:44 managed-node2 quadlet-demo-mysql[52450]: c22d23c0f7b46cca50b2c4b3810f209784c1310f514d6666ef9df66132609bfd Jan 07 09:16:44 managed-node2 systemd[1]: quadlet-demo-mysql.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has successfully entered the 'dead' state. Jan 07 09:16:44 managed-node2 systemd[1]: Stopped quadlet-demo-mysql.service. ░░ Subject: A stop job for unit quadlet-demo-mysql.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has finished. ░░ ░░ The job identifier is 4022 and the job result is done. Jan 07 09:16:44 managed-node2 python3.12[52628]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:46 managed-node2 python3.12[52892]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:16:46 managed-node2 python3.12[53023]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:16:46 managed-node2 systemd[1]: Reload requested from client PID 53024 ('systemctl') (unit session-6.scope)... Jan 07 09:16:46 managed-node2 systemd[1]: Reloading... Jan 07 09:16:47 managed-node2 systemd[1]: Reloading finished in 216 ms. 
Jan 07 09:16:48 managed-node2 python3.12[53349]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:48 managed-node2 podman[53350]: 2025-01-07 09:16:48.437319183 -0500 EST m=+0.235803970 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 07 09:16:48 managed-node2 podman[53350]: 2025-01-07 09:16:48.218292756 -0500 EST m=+0.016777578 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 Jan 07 09:16:49 managed-node2 python3.12[53487]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:49 managed-node2 python3.12[53626]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:49 managed-node2 python3.12[53765]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:50 managed-node2 python3.12[53904]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:51 managed-node2 python3.12[54318]: ansible-service_facts Invoked Jan 07 09:16:54 managed-node2 python3.12[54555]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:54 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 07 09:16:55 managed-node2 python3.12[54689]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 07 09:16:55 managed-node2 systemd[1]: Reload requested from client PID 54692 ('systemctl') (unit session-6.scope)... Jan 07 09:16:55 managed-node2 systemd[1]: Reloading... Jan 07 09:16:55 managed-node2 systemd[1]: Reloading finished in 216 ms. Jan 07 09:16:55 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state. Jan 07 09:16:55 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service. ░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql-volume.service has finished. ░░ ░░ The job identifier is 4105 and the job result is done. 
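quadlet-demo-mysql-volume.service is the Quadlet-generated helper unit for the named volume; the next steps stop it, delete quadlet-demo-mysql.volume, and reload systemd. The volume itself (systemd-quadlet-demo-mysql, per the removal event just below) could also be dropped by hand, as in this sketch (illustrative; the logged cleanup performs the equivalent removal itself).

- name: Remove the named volume directly (sketch)
  ansible.builtin.command:
    cmd: podman volume rm systemd-quadlet-demo-mysql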
Jan 07 09:16:56 managed-node2 python3.12[54880]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:16:57 managed-node2 python3.12[55144]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 07 09:16:58 managed-node2 python3.12[55275]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 07 09:16:58 managed-node2 systemd[1]: Reload requested from client PID 55276 ('systemctl') (unit session-6.scope)... Jan 07 09:16:58 managed-node2 systemd[1]: Reloading... Jan 07 09:16:58 managed-node2 systemd[1]: Reloading finished in 218 ms. Jan 07 09:16:58 managed-node2 podman[55463]: 2025-01-07 09:16:58.680556885 -0500 EST m=+0.026755127 volume remove systemd-quadlet-demo-mysql Jan 07 09:16:59 managed-node2 python3.12[55601]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:16:59 managed-node2 python3.12[55739]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:17:00 managed-node2 python3.12[55877]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:17:00 managed-node2 python3.12[56016]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:17:01 managed-node2 python3.12[56154]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 07 09:17:02 managed-node2 python3.12[56569]: ansible-service_facts Invoked Jan 07 09:17:05 managed-node2 python3.12[56807]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 07 09:17:07 managed-node2 python3.12[56940]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 07 09:17:07 managed-node2 systemd[1]: Reload requested from client PID 56943 ('systemctl') (unit session-6.scope)... Jan 07 09:17:07 managed-node2 systemd[1]: Reloading... Jan 07 09:17:07 managed-node2 systemd[1]: Reloading finished in 211 ms. 
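After each cleanup stage the test re-runs the same read-only podman listings (images, volumes, containers, networks) to see what is left behind. A compact sketch of that verification loop:

- name: List remaining podman objects after cleanup (sketch of the logged read-only checks)
  ansible.builtin.command:
    cmd: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  register: __leftovers   # illustrative variable name
  changed_when: false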
Jan 07 09:17:07 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state.
Jan 07 09:17:07 managed-node2 systemd[1]: Stopped quadlet-demo-network.service.
░░ Subject: A stop job for unit quadlet-demo-network.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-network.service has finished.
░░
░░ The job identifier is 4106 and the job result is done.
Jan 07 09:17:07 managed-node2 python3.12[57132]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 07 09:17:09 managed-node2 python3.12[57396]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 07 09:17:09 managed-node2 python3.12[57527]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 07 09:17:09 managed-node2 systemd[1]: Reload requested from client PID 57528 ('systemctl') (unit session-6.scope)...
Jan 07 09:17:09 managed-node2 systemd[1]: Reloading...
Jan 07 09:17:09 managed-node2 systemd[1]: Reloading finished in 208 ms.
Jan 07 09:17:10 managed-node2 python3.12[57852]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:11 managed-node2 python3.12[57990]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:12 managed-node2 python3.12[58128]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:12 managed-node2 python3.12[58266]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:13 managed-node2 python3.12[58404]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:14 managed-node2 python3.12[58819]: ansible-service_facts Invoked
Jan 07 09:17:16 managed-node2 python3.12[59057]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 07 09:17:17 managed-node2 python3.12[59195]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2              : ok=409  changed=47   unreachable=0    failed=2    skipped=442  rescued=2    ignored=0

TASKS RECAP ********************************************************************
Tuesday 07 January 2025 09:17:17 -0500 (0:00:00.502)       0:03:50.659 *******
===============================================================================
Check web -------------------------------------------------------------- 33.16s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 28.08s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 8.28s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 3.31s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.14s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.65s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.50s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Start service ------------------------ 2.47s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.27s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.24s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.04s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.97s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.94s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.50s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Gathering Facts --------------------------------------------------------- 1.37s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.35s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.22s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.21s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.podman : Prune images no longer in use -------- 1.18s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.18s
/tmp/collections-Ij3/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71