ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-PRc
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_basic.yml **********************************************
2 plays in /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:5
Saturday 18 January 2025  11:29:18 -0500 (0:00:00.007)       0:00:00.007 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_test_password": {"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"}, "mysql_container_root_password": {"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"}}, "ansible_included_var_files": ["/tmp/podman-JDA/tests/vars/vault-variables.yml"], "changed": false}

PLAY [Ensure that the role can manage quadlet specs] ***************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
Saturday 18 January 2025  11:29:18 -0500 (0:00:00.038)       0:00:00.045 ******
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
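(Aside: the vaulted facts above are loaded from /tmp/podman-JDA/tests/vars/vault-variables.yml. For readers unfamiliar with the format, here is a minimal sketch of what such a vars file looks like, assuming the values were produced by `ansible-vault encrypt_string`; the variable names are taken from the log, while the ciphertext below is shortened to a placeholder and is not the real vaulted data.)

```yaml
# vault-variables.yml -- sketch only; ciphertext truncated to a placeholder
__podman_test_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  35383939616163653333633431363463...
mysql_container_root_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  61333932373230333539663035366431...
```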
ok: [managed-node3]

TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:55
Saturday 18 January 2025  11:29:19 -0500 (0:00:01.191)       0:00:01.237 ******
skipping: [managed-node3] => {"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""}

TASK [End test] ****************************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:62
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.023)       0:00:01.260 ******
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node3] => {"skip_reason": "end_play conditional evaluated to False, continuing play"}
MSG: end_play

TASK [Run role - do not pull images] *******************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:70
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.012)       0:00:01.272 ******
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.077)       0:00:01.349 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.036)       0:00:01.386 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.050)       0:00:01.437 ******
ok: [managed-node3] => {"changed": false, "stat": {"exists": false}}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.512)       0:00:01.949 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_is_ostree": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 18 January 2025  11:29:20 -0500 (0:00:00.023)       0:00:01.973 ******
ok: [managed-node3] => {"changed": false, "stat": {"exists": false}}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 18 January 2025  11:29:21 -0500 (0:00:00.363)       0:00:02.336 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_is_transactional": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 18 January 2025  11:29:21 -0500 (0:00:00.023)       0:00:02.359 ******
ok: [managed-node3] => (item=RedHat.yml) => {"ansible_facts": {"__podman_packages": ["podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml"}
skipping: [managed-node3] => (item=CentOS.yml) => {"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False"}
ok: [managed-node3] => (item=CentOS_10.yml) => {"ansible_facts": {"__podman_packages": ["iptables-nft", "podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml"}
ok: [managed-node3] => (item=CentOS_10.yml) => {"ansible_facts": {"__podman_packages": ["iptables-nft", "podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml"}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 18 January 2025  11:29:21 -0500 (0:00:00.045)       0:00:02.404 ******
ok: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 18 January 2025  11:29:22 -0500 (0:00:01.198)       0:00:03.603 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 18 January 2025  11:29:22 -0500 (0:00:00.045)       0:00:03.649 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 18 January 2025  11:29:22 -0500 (0:00:00.068)       0:00:03.718 ******
skipping: [managed-node3] => {"false_condition": "__podman_is_transactional | d(false)"}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 18 January 2025  11:29:22 -0500 (0:00:00.052)       0:00:03.770 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 18 January 2025  11:29:22 -0500 (0:00:00.072)       0:00:03.842 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 18 January 2025  11:29:22 -0500 (0:00:00.074)       0:00:03.917 ******
ok: [managed-node3] => {"changed": false, "cmd": ["podman", "--version"], "delta": "0:00:00.025266", "end": "2025-01-18 11:29:23.096009", "rc": 0, "start": "2025-01-18 11:29:23.070743"}
STDOUT: podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.492)       0:00:04.410 ******
ok: [managed-node3] => {"ansible_facts": {"podman_version": "5.3.1"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.031)       0:00:04.441 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.029)       0:00:04.471 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.052)       0:00:04.523 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.049)       0:00:04.573 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.042)       0:00:04.615 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node3
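(Aside: the version gates above all reduce to the Jinja `version` test against the `podman_version` fact set from `podman --version`, as the `false_condition` values show. A minimal sketch of the pattern, with the task name taken from the log and the message paraphrased:)

```yaml
# Sketch of the version gating seen above; podman_version is "5.3.1" here,
# so the condition is false and the task is skipped
- name: Podman package version must be 4.4 or later for quadlet, secrets
  ansible.builtin.fail:
    msg: Quadlet and secret support requires podman 4.4 or later
  when: podman_version is version("4.4", "<")
```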
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.068)       0:00:04.683 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.079)       0:00:04.763 ******
ok: [managed-node3] => {"ansible_facts": {"getent_passwd": {"root": ["x", "0", "0", "Super User", "/root", "/bin/bash"]}}, "changed": false}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025  11:29:23 -0500 (0:00:00.465)       0:00:05.229 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.032)       0:00:05.262 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_group": "0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.040)       0:00:05.302 ******
ok: [managed-node3] => {"changed": false, "stat": {"atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true}}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.382)       0:00:05.684 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.030)       0:00:05.715 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.030)       0:00:05.746 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.037)       0:00:05.783 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.047)       0:00:05.831 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.030)       0:00:05.861 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.031)       0:00:05.892 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.029)       0:00:05.921 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 18 January 2025  11:29:24 -0500 (0:00:00.030)       0:00:05.952 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf"}, "changed": false}
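(Aside: the subuid/subgid tasks above are all skipped because this run manages root (`__podman_user` is "root"). For a rootless user the role would consult `getsubids` instead. A hedged sketch of that branch; the `when:` condition is copied from the log, while the task body and register name are illustrative, not the role's exact source:)

```yaml
# Sketch: checking subuid ranges for a rootless user with the getsubids CLI
- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}
  register: __podman_register_subuids  # hypothetical register name
  changed_when: false
  when: __podman_user not in ["root", "0"]
```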
"/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.052) 0:00:06.004 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.053) 0:00:06.058 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.030) 0:00:06.088 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.028) 0:00:06.116 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.055) 0:00:06.172 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:29:24 -0500 (0:00:00.059) 0:00:06.231 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.028) 0:00:06.259 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.056) 0:00:06.316 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.029) 0:00:06.346 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.028) 0:00:06.374 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.057) 0:00:06.432 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.029) 0:00:06.461 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.028) 0:00:06.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.029) 0:00:06.519 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.028) 0:00:06.548 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.028) 0:00:06.576 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:29:25 -0500 (0:00:00.027) 0:00:06.604 
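(Aside: every config handler above is skipped because the corresponding role variable is empty, as the `length > 0` conditions show. A sketch of how a caller might populate two of them; the exact accepted structure is defined by the role, so treat the shapes below as illustrative assumptions, not documented values:)

```yaml
# Illustrative only: non-empty values like these would make the role write
# /etc/containers/containers.conf.d/50-systemroles.conf and
# /etc/containers/registries.conf.d/50-systemroles.conf
podman_containers_conf:
  containers:
    log_driver: journald
podman_registries_conf:
  registry:
    - location: quay.io
```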
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.029)       0:00:06.633 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.025)       0:00:06.658 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.024)       0:00:06.684 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.025)       0:00:06.709 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.025)       0:00:06.735 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.070)       0:00:06.805 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_quadlet_file_src": "", "__podman_quadlet_spec": {"Container": {"ContainerName": "nopull", "Image": "quay.io/libpod/testimage:20210610"}, "Install": {"WantedBy": "default.target"}}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": ""}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.106)       0:00:06.912 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_continue_if_pull_fails": false, "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root"}, "changed": false}
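(Aside: the `__podman_quadlet_spec` fact in "part 0" above is all that quadlet needs for this test. Based on that spec and the destination reported later in the run (/etc/containers/systemd/nopull.container, 151 bytes), the rendered unit is presumably close to the sketch below; section order within the file is a guess:)

```
# /etc/containers/systemd/nopull.container -- sketch of the rendered quadlet
[Container]
ContainerName=nopull
Image=quay.io/libpod/testimage:20210610

[Install]
WantedBy=default.target
```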
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.058)       0:00:06.970 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.048)       0:00:07.018 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.073)       0:00:07.092 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.091)       0:00:07.184 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025  11:29:25 -0500 (0:00:00.051)       0:00:07.236 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.053)       0:00:07.289 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_group": "0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.068)       0:00:07.358 ******
ok: [managed-node3] => {"changed": false, "stat": {"atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true}}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.434)       0:00:07.792 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.048)       0:00:07.841 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.051)       0:00:07.893 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.069)       0:00:07.962 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.053)       0:00:08.015 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.048)       0:00:08.064 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.047)       0:00:08.112 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.048)       0:00:08.160 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025  11:29:26 -0500 (0:00:00.047)       0:00:08.208 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_activate_systemd_unit": false, "__podman_images_found": ["quay.io/libpod/testimage:20210610"], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.084)       0:00:08.292 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_quadlet_path": "/etc/containers/systemd"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.050)       0:00:08.343 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.035)       0:00:08.378 ******
ok: [managed-node3] => {"ansible_facts": {"__podman_images": ["quay.io/libpod/testimage:20210610"], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": []}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.083)       0:00:08.462 ******
ok: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.074)       0:00:08.537 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.028)       0:00:08.565 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.065)       0:00:08.631 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.049)       0:00:08.681 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.031)       0:00:08.712 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.044)       0:00:08.756 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.043)       0:00:08.800 ******
skipping: [managed-node3] => {"changed": false, "skipped_reason": "No items in the list"}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.043)       0:00:08.843 ******
skipping: [managed-node3] => (item=None) => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 18 January 2025  11:29:27 -0500 (0:00:00.058)       0:00:08.902 ******
ok: [managed-node3] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 18 January 2025  11:29:28 -0500 (0:00:00.617)       0:00:09.519 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False"}
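(Aside: pulling the run together, the facts set earlier (`__podman_state: created`, `__podman_pull_image: false`, `__podman_activate_systemd_unit: false`) suggest the test invoked the role roughly as sketched below. This assumes the `podman_quadlet_specs` role variable; the per-spec field names are inferred from the `__podman_*` facts in this log rather than copied from the test source, which is hidden by no_log:)

```yaml
# Sketch of "Run role - do not pull images"; field names inferred, not verbatim
- name: Run role - do not pull images
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.podman
  vars:
    podman_quadlet_specs:
      - name: nopull
        type: container
        state: created                # matches __podman_state
        pull_image: false             # matches __podman_pull_image
        activate_systemd_unit: false  # matches __podman_activate_systemd_unit
        Container:
          ContainerName: nopull
          Image: quay.io/libpod/testimage:20210610
        Install:
          WantedBy: default.target
```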
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 18 January 2025  11:29:28 -0500 (0:00:00.046)       0:00:09.565 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 18 January 2025  11:29:28 -0500 (0:00:00.051)       0:00:09.617 ******
changed: [managed-node3] => {"changed": true, "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "dest": "/etc/containers/systemd/nopull.container", "gid": 0, "group": "root", "md5sum": "cedb6667f6cd1b033fe06e2810fe6b19", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 151, "src": "/root/.ansible/tmp/ansible-tmp-1737217768.42563-11038-258453571987208/.source.container", "state": "file", "uid": 0}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.942)       0:00:10.559 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.031)       0:00:10.591 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.033)       0:00:10.624 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.034)       0:00:10.659 ******
skipping: [managed-node3] => {"changed": false, "skipped_reason": "No items in the list"}

TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.024)       0:00:10.684 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.027)       0:00:10.711 ******
skipping: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}
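(Aside: Reload systemctl, Start service, and Restart service are all skipped above because `__podman_activate_systemd_unit` is false for this spec. Had it been true, the role would reload systemd so the quadlet generator produces nopull.service from the .container file and then start it. A sketch of that activation step, using the standard systemd module rather than the role's exact task:)

```yaml
# Sketch of the activation the role skipped (activate_systemd_unit: false)
- name: Reload systemd and start the generated quadlet service
  ansible.builtin.systemd:
    name: nopull.service
    state: started
    daemon_reload: true
```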
TASK [Verify image not pulled] *************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:86
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.040)       0:00:10.752 ******
ok: [managed-node3] => {"changed": false}
MSG: All assertions passed

TASK [Run role - try to pull bogus image] **************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:90
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.083)       0:00:10.835 ******
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.117)       0:00:10.952 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.056)       0:00:11.009 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.039)       0:00:11.048 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.030)       0:00:11.079 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.028)       0:00:11.108 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.033)       0:00:11.141 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 18 January 2025  11:29:29 -0500 (0:00:00.048)       0:00:11.190 ******
ok: [managed-node3] => (item=RedHat.yml) => {"ansible_facts": {"__podman_packages": ["podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml"}
skipping: [managed-node3] => (item=CentOS.yml) => {"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False"}
ok: [managed-node3] => (item=CentOS_10.yml) => {"ansible_facts": {"__podman_packages": ["iptables-nft", "podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml"}
ok: [managed-node3] => (item=CentOS_10.yml) => {"ansible_facts": {"__podman_packages": ["iptables-nft", "podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml"}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 18 January 2025  11:29:30 -0500 (0:00:00.067)       0:00:11.257 ******
ok: [managed-node3] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 18 January 2025  11:29:30 -0500 (0:00:00.843)       0:00:12.102 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 18 January 2025  11:29:30 -0500 (0:00:00.030)       0:00:12.132 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 18 January 2025  11:29:30 -0500 (0:00:00.034)       0:00:12.167 ******
skipping: [managed-node3] => {"false_condition": "__podman_is_transactional | d(false)"}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 18 January 2025  11:29:30 -0500 (0:00:00.034)       0:00:12.201 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.048)       0:00:12.250 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.029)       0:00:12.279 ******
ok: [managed-node3] => {"changed": false, "cmd": ["podman", "--version"], "delta": "0:00:00.024168", "end": "2025-01-18 11:29:31.377293", "rc": 0, "start": "2025-01-18 11:29:31.353125"}
STDOUT: podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.451)       0:00:12.730 ******
ok: [managed-node3] => {"ansible_facts": {"podman_version": "5.3.1"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.046)       0:00:12.777 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.048)       0:00:12.826 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.036)       0:00:12.863 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.037)       0:00:12.900 ******
skipping: [managed-node3] => {"changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 18 January 2025  11:29:31 -0500 (0:00:00.048)       0:00:12.948 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node3
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:29:31 -0500 (0:00:00.045) 0:00:12.994 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:31 -0500 (0:00:00.057) 0:00:13.051 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:31 -0500 (0:00:00.033) 0:00:13.084 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:31 -0500 (0:00:00.034) 0:00:13.118 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:31 -0500 (0:00:00.040) 0:00:13.158 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.442) 0:00:13.601 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.052) 
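
The stat above finds /usr/bin/getsubids, but the range lookups that follow are skipped because __podman_user is root. For a rootless user the role would query the allocated ranges roughly as follows; the user name "alice" is hypothetical:

# getsubids prints subordinate ID ranges; -g switches from subuids to subgids.
- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids alice
  register: __subuid_info
  changed_when: false

- name: Check with getsubids for user subgids
  ansible.builtin.command: getsubids -g alice
  register: __subgid_info
  changed_when: false
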
0:00:13.654 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.053) 0:00:13.707 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.051) 0:00:13.759 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.051) 0:00:13.810 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.050) 0:00:13.861 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.053) 0:00:13.915 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.052) 0:00:13.967 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.104) 0:00:14.072 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: 
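
Had getsubids been missing, the tasks above fall back to reading /etc/subuid and /etc/subgid directly and fail when the user has no entry. Host prep for a hypothetical rootless user would look like this (range values are illustrative, and this step is not part of the role):

# Allocate 65536 subordinate IDs starting at 100000 for user alice.
- name: Allocate subuid/subgid ranges for alice
  ansible.builtin.command: usermod --add-subuids 100000-165535 --add-subgids 100000-165535 alice
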
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.058) 0:00:14.130 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:29:32 -0500 (0:00:00.097) 0:00:14.228 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.049) 0:00:14.278 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.057) 0:00:14.335 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.115) 0:00:14.451 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.048) 0:00:14.500 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.047) 0:00:14.548 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.102) 0:00:14.651 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:29:33 -0500 
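
Every config handler in this stretch short-circuits because the test supplies no configuration: podman_containers_conf and podman_registries_conf are both empty. When populated, they are rendered into the 50-systemroles.conf drop-in paths computed earlier. The key layout below is an assumption based on containers.conf(5) and containers-registries.conf(5) syntax, not taken from this run:

podman_containers_conf:
  containers:
    log_driver: journald

podman_registries_conf:
  unqualified-search-registries:
    - registry.access.redhat.com
    - quay.io
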
(0:00:00.035) 0:00:14.686 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.035) 0:00:14.722 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.069) 0:00:14.792 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.034) 0:00:14.826 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.029) 0:00:14.855 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.030) 0:00:14.885 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.029) 0:00:14.915 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.030) 0:00:14.945 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.029) 0:00:14.975 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle 
certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.068) 0:00:15.043 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.027) 0:00:15.070 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.027) 0:00:15.098 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.026) 0:00:15.125 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.027) 0:00:15.153 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:29:33 -0500 (0:00:00.072) 0:00:15.225 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "bogus", "Image": "this_is_a_bogus_image" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.039) 0:00:15.265 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.038) 0:00:15.304 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 
0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.028) 0:00:15.333 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.046) 0:00:15.379 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.097) 0:00:15.477 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.051) 0:00:15.529 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.052) 0:00:15.582 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.067) 0:00:15.650 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.424) 0:00:16.074 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.032) 0:00:16.107 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.030) 0:00:16.137 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.032) 0:00:16.169 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.030) 0:00:16.199 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:34 -0500 (0:00:00.031) 0:00:16.231 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.070) 0:00:16.302 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.051) 0:00:16.353 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.054) 0:00:16.408 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "this_is_a_bogus_image" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.084) 0:00:16.492 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.065) 0:00:16.558 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.051) 0:00:16.609 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "this_is_a_bogus_image" ], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.125) 0:00:16.735 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.058) 0:00:16.794 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.047) 0:00:16.841 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.105) 0:00:16.947 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK 
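
Part 5 above resolves the unit path to /etc/containers/systemd/bogus.container. Combining that path with the Container and Install sections from part 0, the copy that later reports changed (138 bytes) should produce approximately this file; the section order and any generated header are assumptions:

# Sketch of the effective file write, not the role's actual task.
- name: Ensure quadlet file is present
  ansible.builtin.copy:
    dest: /etc/containers/systemd/bogus.container
    owner: root
    group: root
    mode: "0644"
    content: |
      [Container]
      ContainerName=bogus
      Image=this_is_a_bogus_image

      [Install]
      WantedBy=default.target
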
[fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.085) 0:00:17.032 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.049) 0:00:17.081 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.046) 0:00:17.128 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.049) 0:00:17.178 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:29:35 -0500 (0:00:00.044) 0:00:17.222 ****** ok: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:29:36 -0500 (0:00:00.896) 0:00:18.119 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:37 -0500 (0:00:00.451) 0:00:18.570 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:29:37 -0500 (0:00:00.080) 0:00:18.651 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:29:37 -0500 (0:00:00.061) 0:00:18.712 ****** changed: [managed-node3] => { "changed": true, "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "dest": "/etc/containers/systemd/bogus.container", "gid": 0, "group": "root", "md5sum": "97480a9a73734d9f8007d2c06e7fed1f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 138, "src": "/root/.ansible/tmp/ansible-tmp-1737217777.5201657-11373-52709709939833/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.854) 0:00:19.567 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.044) 0:00:19.612 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.038) 0:00:19.651 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.039) 0:00:19.690 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.051) 0:00:19.742 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.037) 0:00:19.779 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled and no error] ************************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:106 Saturday 18 
January 2025 11:29:38 -0500 (0:00:00.041) 0:00:19.821 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:113 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.033) 0:00:19.854 ****** included: fedora.linux_system_roles.podman for managed-node3 => (item=nopull) included: fedora.linux_system_roles.podman for managed-node3 => (item=bogus) TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.155) 0:00:20.010 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.048) 0:00:20.059 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.035) 0:00:20.095 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.028) 0:00:20.124 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.029) 0:00:20.153 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.028) 0:00:20.182 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:29:38 -0500 (0:00:00.029) 0:00:20.211 ****** [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable 'item' is already in use. 
You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:29:39 -0500 (0:00:00.064) 0:00:20.276 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.973) 0:00:21.250 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.043) 0:00:21.293 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.037) 0:00:21.330 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.036) 0:00:21.366 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: 
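
The WARNING above fires because this role pass is itself running inside a loop (the Cleanup loop over "nopull" and "bogus"), so the inner include_vars loop reuses the default variable 'item'. The fix the warning asks for looks like this; the list and variable names are illustrative:

- name: Set platform/version specific variables
  ansible.builtin.include_vars: "{{ __vars_file }}"
  vars:
    __vars_file: "{{ role_path }}/vars/{{ __varfile }}"
  loop: "{{ __candidate_vars_files }}"   # e.g. RedHat.yml, CentOS.yml, CentOS_10.yml
  loop_control:
    loop_var: __varfile                  # avoids colliding with the outer loop's 'item'
  when: __vars_file is file
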
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.039) 0:00:21.406 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.034) 0:00:21.440 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024373", "end": "2025-01-18 11:29:40.526226", "rc": 0, "start": "2025-01-18 11:29:40.501853" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.434) 0:00:21.875 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.054) 0:00:21.930 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.044) 0:00:21.974 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.051) 0:00:22.026 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.053) 0:00:22.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.067) 0:00:22.147 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 
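
Everything from the Cleanup task onward is a second pass through the same role, entered once per quadlet item ("nopull", then "bogus"). Further below, __podman_state switches to "absent", which makes the role remove what the first pass created. A sketch of the likely cleanup invocation; the spec layout is an assumption:

- name: Cleanup
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.podman
  vars:
    podman_quadlet_specs:
      - name: nopull
        type: container
        state: absent
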
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:29:40 -0500 (0:00:00.060) 0:00:22.207 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.066) 0:00:22.274 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.040) 0:00:22.314 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.033) 0:00:22.348 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.078) 0:00:22.427 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.404) 0:00:22.831 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check 
with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.052) 0:00:22.883 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.066) 0:00:22.949 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.062) 0:00:23.012 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.049) 0:00:23.062 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.053) 0:00:23.115 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.050) 0:00:23.165 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:41 -0500 (0:00:00.050) 0:00:23.215 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.049) 0:00:23.265 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", 
"__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.062) 0:00:23.327 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.092) 0:00:23.419 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.048) 0:00:23.468 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.050) 0:00:23.518 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.094) 0:00:23.613 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.048) 0:00:23.662 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.050) 0:00:23.712 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.093) 0:00:23.806 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] 
*********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.048) 0:00:23.854 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.102) 0:00:23.957 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.103) 0:00:24.061 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.051) 0:00:24.112 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.049) 0:00:24.162 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:29:42 -0500 (0:00:00.050) 0:00:24.213 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.054) 0:00:24.267 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.044) 0:00:24.311 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:29:43 -0500 
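
The linger bookkeeping below stays empty because this play runs entirely as root. Lingering only matters for rootless users, whose per-user systemd instance must outlive their login sessions for quadlet units to keep running; a hypothetical enable step for a user "alice":

- name: Enable linger if needed
  ansible.builtin.command: loginctl enable-linger alice
  args:
    creates: /var/lib/systemd/linger/alice   # marker that systemd-logind writes once linger is on
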
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.037) 0:00:24.348 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.032) 0:00:24.381 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.032) 0:00:24.413 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.031) 0:00:24.445 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.029) 0:00:24.474 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.027) 0:00:24.501 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.070) 0:00:24.572 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025 11:29:43 -0500 (0:00:00.040) 0:00:24.613 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }
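Part 1 above resolves __podman_state to "absent", and part 2 just below names the quadlet "nopull": this is the cleanup half of the test. The actual spec list is hidden by no_log, but an input of roughly this shape would drive such a pass (podman_quadlet_specs is the role's public input variable; the exact contents used here are an assumption):

    # Hypothetical input; the spec this test really passes is censored above.
    podman_quadlet_specs:
      - name: nopull
        type: container
        state: absent        # matches __podman_state: "absent" in part 1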
false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.033) 0:00:24.684 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.044) 0:00:24.729 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.056) 0:00:24.785 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.116) 0:00:24.902 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.032) 0:00:24.934 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:43 -0500 (0:00:00.042) 0:00:24.977 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.417) 0:00:25.395 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.031) 0:00:25.427 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.030) 0:00:25.458 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.031) 0:00:25.489 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.031) 0:00:25.521 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.032) 0:00:25.553 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.030) 0:00:25.584 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:44 -0500 (0:00:00.031) 0:00:25.615 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
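All of the subuid/subgid checks above skip because this pass runs as root. For a rootless user, the role queries the subordinate ID ranges with getsubids; the two probes amount to something like the following sketch (module choice and register names are assumptions inferred from the task names and conditions in the log):

    # Sketch of the two getsubids probes; they only run for non-root users.
    - name: Check with getsubids for user subuids
      ansible.builtin.command: getsubids {{ __podman_user }}     # prints the user's /etc/subuid ranges
      register: __podman_register_subuids                        # assumed register name
      when: __podman_user not in ["root", "0"]

    - name: Check with getsubids for user subgids
      ansible.builtin.command: getsubids -g {{ __podman_user }}  # -g selects subgid ranges
      register: __podman_register_subgids                        # assumed register name
      when: __podman_user not in ["root", "0"]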
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.030) 0:00:25.646 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.052) 0:00:25.698 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.031) 0:00:25.730 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.028) 0:00:25.759 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.072) 0:00:25.831 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.037) 0:00:25.868 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.077) 0:00:25.945 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 18 January 2025 11:29:44 -0500 (0:00:00.044) 0:00:25.990 ******
ok: [managed-node3] => { "changed": false, "failed_when_result": false }
MSG: Could not find the requested service nopull.service: host
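Part 4 above picked /etc/containers/systemd as the quadlet directory because this unit is rootful. Rootless quadlets live under the user's home instead; a simplified sketch of that selection (the role's real computation also accounts for systemd scope and XDG paths):

    - name: Select quadlet directory (simplified sketch)
      ansible.builtin.set_fact:
        __podman_quadlet_path: >-
          {{ '/etc/containers/systemd'
             if not __podman_rootless | bool
             else __podman_user_home_dir ~ '/.config/containers/systemd' }}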
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 18 January 2025 11:29:45 -0500 (0:00:00.893) 0:00:26.884 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217769.2441397, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "ctime": 1737217769.2461398, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 411042020, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217768.77414, "nlink": 1, "path": "/etc/containers/systemd/nopull.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 151, "uid": 0, "version": "2705720464", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.481) 0:00:27.365 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.094) 0:00:27.459 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.534) 0:00:27.994 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.048) 0:00:28.043 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.029) 0:00:28.073 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 18 January 2025 11:29:46 -0500 (0:00:00.030) 0:00:28.103 ******
changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/nopull.container", "state": "absent" }
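The parse step above slurps the quadlet unit so that cleanup can discover the images and volumes to remove; slurp returns file content base64-encoded, so decoding is the first move. A minimal sketch of that step, consistent with the __podman_quadlet_raw fact that "Reset raw variable" clears above (the decoded fact name is an assumption, and the role's real parser does considerably more):

    - name: Slurp quadlet file
      ansible.builtin.slurp:
        path: "{{ __podman_quadlet_file }}"
      register: __podman_quadlet_raw

    - name: Decode the slurped content for parsing
      ansible.builtin.set_fact:
        __podman_quadlet_text: "{{ __podman_quadlet_raw.content | b64decode }}"  # assumed fact name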
"state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:47 -0500 (0:00:00.397) 0:00:28.501 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.766) 0:00:29.268 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.436) 0:00:29.705 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.042) 0:00:29.748 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.029) 0:00:29.777 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_prune_images | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.030) 0:00:29.807 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.051) 0:00:29.859 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.028) 0:00:29.887 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.028) 
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.027) 0:00:29.944 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.032) 0:00:29.976 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.031) 0:00:30.007 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.032) 0:00:30.040 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.031) 0:00:30.071 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.070) 0:00:30.141 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 18 January 2025 11:29:48 -0500 (0:00:00.032) 0:00:30.174 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }
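The recurring "censored" results throughout this log come from no_log: true, which Ansible applies to any task whose output might leak secret data; the result is replaced with the placeholder text wholesale. The pattern, for reference (variable name is hypothetical):

    - name: Any task touching secret data (illustrative)
      ansible.builtin.debug:
        msg: "{{ some_secret }}"   # hypothetical variable
      no_log: true                 # result is shown as "censored" in the log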
"__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:29:48 -0500 (0:00:00.027) 0:00:30.234 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.027) 0:00:30.261 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.025) 0:00:30.286 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.040) 0:00:30.326 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.061) 0:00:30.387 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.045) 0:00:30.433 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.030) 0:00:30.464 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.029) 0:00:30.493 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.029) 0:00:30.522 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.030) 0:00:30.553 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:29:49 -0500 (0:00:00.066) 0:00:30.619 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:29:50 -0500 (0:00:00.874) 0:00:31.494 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:29:50 -0500 (0:00:00.051) 0:00:31.545 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:29:50 -0500 (0:00:00.050) 0:00:31.596 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update 
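In the vars-file loop above, CentOS_10.yml is loaded twice because both the distribution major version and the full version render as "10" on this host, and later files intentionally override earlier ones (most specific last). The skip line exposes the pattern, which looks roughly like this sketch (loop construction and vars path are assumptions inferred from the loop items and the "__vars_file is file" condition in the log):

    - name: Set platform/version specific variables (pattern sketch)
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml        # os family
        - CentOS.yml        # distribution (file absent here, so it skips)
        - CentOS_10.yml     # distribution + major version
        - CentOS_10.yml     # distribution + full version (same name on this host)
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file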
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 18 January 2025 11:29:50 -0500 (0:00:00.100) 0:00:31.696 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 18 January 2025 11:29:50 -0500 (0:00:00.048) 0:00:31.745 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 18 January 2025 11:29:50 -0500 (0:00:00.046) 0:00:31.791 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025030", "end": "2025-01-18 11:29:50.907446", "rc": 0, "start": "2025-01-18 11:29:50.882416" }
STDOUT: podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 18 January 2025 11:29:50 -0500 (0:00:00.448) 0:00:32.240 ******
ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 18 January 2025 11:29:51 -0500 (0:00:00.057) 0:00:32.297 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 18 January 2025 11:29:51 -0500 (0:00:00.045) 0:00:32.343 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 18 January 2025 11:29:51 -0500 (0:00:00.054) 0:00:32.397 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }
MSG: end_host conditional evaluated to false, continuing execution for managed-node3
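The version gates above compare podman_version (5.3.1 here) against feature thresholds using Jinja's version test, so everything passes on this host. The 4.4 quadlet/secrets gate amounts to a check like the following sketch (this log shows the role ends the host via META rather than failing; a fail-based form is used here only for illustration):

    - name: Podman must be 4.4+ for quadlet and secrets (sketch)
      ansible.builtin.fail:
        msg: "podman {{ podman_version }} is too old for quadlet/secrets support"
      when: podman_version is version("4.4", "<")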
__has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.106) 0:00:32.560 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.070) 0:00:32.631 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.094) 0:00:32.725 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.053) 0:00:32.779 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.053) 0:00:32.832 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:51 -0500 (0:00:00.065) 0:00:32.898 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.419) 0:00:33.318 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.033) 0:00:33.351 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.035) 0:00:33.387 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.049) 0:00:33.436 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.053) 0:00:33.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.112) 0:00:33.603 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.062) 0:00:33.665 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.051) 0:00:33.716 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] 
**************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.051) 0:00:33.768 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.065) 0:00:33.834 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.096) 0:00:33.930 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.049) 0:00:33.980 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.050) 0:00:34.031 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.098) 0:00:34.130 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.049) 0:00:34.179 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:29:52 -0500 (0:00:00.051) 0:00:34.230 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK 
[fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.098) 0:00:34.329 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.046) 0:00:34.375 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.050) 0:00:34.426 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.104) 0:00:34.531 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.037) 0:00:34.568 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.036) 0:00:34.605 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.102) 0:00:34.707 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.030) 0:00:34.737 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.030) 0:00:34.768 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.029) 0:00:34.798 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.031) 0:00:34.829 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.026) 0:00:34.856 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.029) 0:00:34.885 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.027) 0:00:34.913 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.042) 0:00:34.955 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.119) 0:00:35.075 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.064) 
0:00:35.139 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:29:53 -0500 (0:00:00.062) 0:00:35.202 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.052) 0:00:35.254 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.061) 0:00:35.316 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.063) 0:00:35.379 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.042) 0:00:35.422 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.037) 0:00:35.459 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.040) 0:00:35.500 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, 
"islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.400) 0:00:35.900 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.034) 0:00:35.935 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.084) 0:00:36.019 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.034) 0:00:36.054 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.032) 0:00:36.086 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.030) 0:00:36.117 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.032) 0:00:36.149 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** 
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.030) 0:00:36.180 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:29:54 -0500 (0:00:00.030) 0:00:36.211 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.065) 0:00:36.276 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.055) 0:00:36.332 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.032) 0:00:36.364 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.088) 0:00:36.453 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.042) 0:00:36.496 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.071) 0:00:36.567 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and 
disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:29:55 -0500 (0:00:00.029) 0:00:36.596 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "bogus.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice sysinit.target basic.target -.mount network-online.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "bogus.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=bogus.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name bogus --cidfile=/run/bogus.cid --replace --rm --cgroups=split --sdnotify=conmon -d this_is_a_bogus_image ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name bogus --cidfile=/run/bogus.cid --replace --rm --cgroups=split --sdnotify=conmon -d this_is_a_bogus_image ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", 
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/bogus.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "bogus.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3257384960", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", 
"MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "bogus.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/bogus.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "bogus", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": 
"disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:29:56 -0500 (0:00:00.783) 0:00:37.380 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217778.1821353, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "ctime": 1737217778.1851354, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 633340124, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217777.8901355, "nlink": 1, "path": "/etc/containers/systemd/bogus.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 138, "uid": 0, "version": "1977658125", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:29:56 -0500 (0:00:00.445) 0:00:37.825 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:29:56 -0500 (0:00:00.100) 0:00:37.926 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:29:57 -0500 (0:00:00.422) 0:00:38.348 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:29:57 -0500 (0:00:00.079) 0:00:38.428 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:29:57 -0500 (0:00:00.114) 0:00:38.542 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** 
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:29:57 -0500 (0:00:00.050) 0:00:38.593 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/bogus.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:29:57 -0500 (0:00:00.419) 0:00:39.012 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:29:58 -0500 (0:00:00.741) 0:00:39.753 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:29:58 -0500 (0:00:00.478) 0:00:40.232 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.050) 0:00:40.283 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.036) 0:00:40.319 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_prune_images | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.029) 0:00:40.349 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.052) 0:00:40.402 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.029) 0:00:40.431 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.028) 0:00:40.460 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.029) 0:00:40.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.034) 0:00:40.525 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.039) 0:00:40.564 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.051) 0:00:40.616 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.053) 0:00:40.670 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.053) 0:00:40.723 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.054) 0:00:40.778 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and 
update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.051) 0:00:40.829 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.047) 0:00:40.877 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.045) 0:00:40.922 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.102) 0:00:41.025 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Create user for testing] ************************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:125 Saturday 18 January 2025 11:29:59 -0500 (0:00:00.101) 0:00:41.126 ****** changed: [managed-node3] => { "changed": true, "comment": "", "create_home": true, "group": 1111, "home": "/home/user_quadlet_basic", "name": "user_quadlet_basic", "shell": "/bin/bash", "state": "present", "system": false, "uid": 1111 } TASK [Get local machine ID] **************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:138 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.715) 0:00:41.842 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_facts[\"distribution_version\"] is version(\"9\", \"<\")", "skip_reason": "Conditional result was False" } TASK [Skip test if cannot reboot] ********************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:144 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.054) 0:00:41.897 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [Enable cgroup controllers] *********************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:150 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.061) 0:00:41.958 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_facts[\"distribution_version\"] is version(\"9\", \"<\")", "skip_reason": "Conditional 
result was False" } TASK [Configure cgroups in kernel] ********************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:182 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.058) 0:00:42.016 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_facts[\"distribution_version\"] is version(\"9\", \"<\")", "skip_reason": "Conditional result was False" } TASK [Reboot] ****************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:188 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.059) 0:00:42.076 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_facts[\"distribution_version\"] is version(\"9\", \"<\")", "skip_reason": "Conditional result was False" } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:191 Saturday 18 January 2025 11:30:00 -0500 (0:00:00.058) 0:00:42.134 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.150) 0:00:42.284 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.078) 0:00:42.363 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.055) 0:00:42.419 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.047) 0:00:42.467 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.051) 0:00:42.518 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.051) 0:00:42.570 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.047) 0:00:42.617 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:30:01 -0500 (0:00:00.113) 0:00:42.730 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:30:02 -0500 (0:00:01.061) 0:00:43.791 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:30:02 -0500 (0:00:00.053) 0:00:43.844 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:30:02 -0500 (0:00:00.060) 0:00:43.905 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update 
systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:30:02 -0500 (0:00:00.051) 0:00:43.956 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:30:02 -0500 (0:00:00.053) 0:00:44.010 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:30:02 -0500 (0:00:00.049) 0:00:44.060 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024845", "end": "2025-01-18 11:30:03.171863", "rc": 0, "start": "2025-01-18 11:30:03.147018" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.425) 0:00:44.485 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.032) 0:00:44.517 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.029) 0:00:44.546 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.068) 0:00:44.615 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.066) 0:00:44.681 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or 
__has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.062) 0:00:44.744 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.056) 0:00:44.801 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.058) 0:00:44.859 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.035) 0:00:44.895 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.037) 0:00:44.932 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:03 -0500 (0:00:00.125) 0:00:45.058 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.414) 0:00:45.472 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.034) 0:00:45.506 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.033) 0:00:45.540 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.033) 0:00:45.573 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.031) 0:00:45.605 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.032) 0:00:45.637 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.031) 0:00:45.669 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.032) 0:00:45.702 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] 
**************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.031) 0:00:45.733 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.044) 0:00:45.778 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.076) 0:00:45.855 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.049) 0:00:45.905 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.036) 0:00:45.941 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.067) 0:00:46.009 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.034) 0:00:46.043 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.034) 0:00:46.078 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK 
[fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.056) 0:00:46.134 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.030) 0:00:46.165 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:30:04 -0500 (0:00:00.079) 0:00:46.244 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.059) 0:00:46.304 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.032) 0:00:46.336 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.053) 0:00:46.390 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.035) 0:00:46.425 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.047) 0:00:46.472 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.051) 0:00:46.524 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.053) 0:00:46.577 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.052) 0:00:46.630 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.044) 0:00:46.674 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.047) 0:00:46.721 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.166) 0:00:46.888 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.038) 0:00:46.926 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:05 -0500 (0:00:00.067) 0:00:46.994 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": [ "x", "1111", "1111", "", "/home/user_quadlet_basic", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.446) 0:00:47.440 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.070) 0:00:47.511 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.071) 0:00:47.583 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.049) 0:00:47.633 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.120) 0:00:47.753 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.053) 0:00:47.806 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.052) 0:00:47.859 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.048) 0:00:47.908 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.055) 
0:00:47.963 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.054) 0:00:48.018 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.055) 0:00:48.073 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.053) 0:00:48.127 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:06 -0500 (0:00:00.067) 0:00:48.194 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:07 -0500 (0:00:00.096) 0:00:48.291 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": "0:00:00.019639", "end": "2025-01-18 11:30:07.409870", "rc": 0, "start": "2025-01-18 11:30:07.390231" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:07 -0500 (0:00:00.488) 0:00:48.780 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:07 -0500 (0:00:00.062) 0:00:48.842 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:30:07 -0500 (0:00:00.039) 0:00:48.882 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], 
"block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217807.5381207, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217807.5381207, "nlink": 3, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 80, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:30:08 -0500 (0:00:00.392) 0:00:49.275 ****** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:08 -0500 (0:00:00.865) 0:00:50.140 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:08 -0500 (0:00:00.036) 0:00:50.177 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:08 -0500 (0:00:00.053) 0:00:50.230 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.033) 0:00:50.264 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.033) 0:00:50.298 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.039) 0:00:50.338 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.082) 0:00:50.420 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.049) 0:00:50.470 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.053) 0:00:50.523 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.053) 0:00:50.576 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.055) 0:00:50.632 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.057) 0:00:50.689 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.051) 0:00:50.740 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 
2025 11:30:09 -0500 (0:00:00.038) 0:00:50.779 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.035) 0:00:50.815 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.046) 0:00:50.861 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:09 -0500 (0:00:00.063) 0:00:50.925 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:10 -0500 (0:00:00.389) 0:00:51.314 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:10 -0500 (0:00:00.037) 0:00:51.351 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:30:10 -0500 (0:00:00.034) 0:00:51.386 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217808.5591202, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217808.5591202, "nlink": 5, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 120, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK 
[fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 18 January 2025  11:30:10 -0500 (0:00:00.415)       0:00:51.802 ******
changed: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 18 January 2025  11:30:12 -0500 (0:00:01.677)       0:00:53.479 ******
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 18 January 2025  11:30:12 -0500 (0:00:00.027)       0:00:53.506 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025  11:30:12 -0500 (0:00:00.125)       0:00:53.632 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {},
        "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n",
        "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025  11:30:12 -0500 (0:00:00.135)       0:00:53.768 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "user_quadlet_basic"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 18 January 2025  11:30:12 -0500 (0:00:00.107)       0:00:53.875 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_quadlet_str",
    "skip_reason": "Conditional result was False"
}
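Decoding the escaped __podman_quadlet_str above yields the Quadlet unit that the role writes to /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network later in this run (the 105-byte file size reported by the copy task matches this content exactly):

[Network]
Subnet=192.168.29.0/24
Gateway=192.168.29.1
Label=app=wordpress
NetworkName=quadlet-basic-name

Quadlet's systemd generator maps a <name>.network unit to a <name>-network.service unit, which is why the service handled below is quadlet-basic-network.service and its generated ExecStart is a "podman network create" call carrying the matching --subnet, --gateway, and --label options.

TASK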
[fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.032) 0:00:53.908 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.049) 0:00:53.958 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.057) 0:00:54.015 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.034) 0:00:54.049 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.034) 0:00:54.084 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:12 -0500 (0:00:00.042) 0:00:54.127 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:13 
-0500 (0:00:00.396)       0:00:54.523 ******
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "getsubids",
        "user_quadlet_basic"
    ],
    "delta": "0:00:00.004636",
    "end": "2025-01-18 11:30:13.600851",
    "rc": 0,
    "start": "2025-01-18 11:30:13.596215"
}

STDOUT:

0: user_quadlet_basic 589824 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025  11:30:13 -0500 (0:00:00.394)       0:00:54.917 ******
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "getsubids",
        "-g",
        "user_quadlet_basic"
    ],
    "delta": "0:00:00.005466",
    "end": "2025-01-18 11:30:13.990875",
    "rc": 0,
    "start": "2025-01-18 11:30:13.985409"
}

STDOUT:

0: user_quadlet_basic 589824 65536

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.387)       0:00:55.305 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "podman_subgid_info": {
            "user_quadlet_basic": {
                "range": 65536,
                "start": 589824
            }
        },
        "podman_subuid_info": {
            "user_quadlet_basic": {
                "range": 65536,
                "start": 589824
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.046)       0:00:55.351 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.033)       0:00:55.385 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.031)       0:00:55.417 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.033)       0:00:55.450 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.032)       0:00:55.482 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}
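Each getsubids line reads: index, user name, first subordinate ID, count — so the allocation above delegates IDs 589824-655359 to user_quadlet_basic for rootless ID mapping. The equivalent /etc/subuid (and /etc/subgid) entry would be:

user_quadlet_basic:589824:65536

TASK [fedora.linux_system_roles.podman : Set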
per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.034) 0:00:55.517 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.052) 0:00:55.570 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.034) 0:00:55.604 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.031) 0:00:55.635 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.074) 0:00:55.710 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.037) 0:00:55.748 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.080) 0:00:55.828 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:14 -0500 (0:00:00.099) 0:00:55.928 ****** included: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025  11:30:14 -0500 (0:00:00.075)       0:00:56.004 ******
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "loginctl",
        "enable-linger",
        "user_quadlet_basic"
    ],
    "delta": null,
    "end": null,
    "rc": 0,
    "start": null
}

STDOUT:

skipped, since /var/lib/systemd/linger/user_quadlet_basic exists

MSG:

Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.435)       0:00:56.440 ******
ok: [managed-node3] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.060)       0:00:56.500 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_item_state | d('present') == 'absent'",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.060)       0:00:56.561 ******
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.046)       0:00:56.607 ******
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.050)       0:00:56.658 ******
changed: [managed-node3] => {
    "changed": true,
    "gid": 1111,
    "group": "user_quadlet_basic",
    "mode": "0755",
    "owner": "user_quadlet_basic",
    "path": "/home/user_quadlet_basic/.config/containers/systemd",
    "secontext": "unconfined_u:object_r:config_home_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 1111
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 18 January 2025  11:30:15 -0500 (0:00:00.440)       0:00:57.098 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__podman_quadlet_file_src | length > 0",
    "skip_reason": "Conditional result was False"
}
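With the unit source directory in place (~/.config/containers/systemd is Quadlet's search path for rootless users; /etc/containers/systemd is the root equivalent), the next tasks write the unit file and reload systemd, which is what actually runs the Quadlet generator and produces the real service under /run/user/1111/systemd/generator/ (see FragmentPath in the "Start service" output below). A rough manual equivalent of these steps, assuming a live session for user_quadlet_basic (which is why the role enabled linger and checked XDG_RUNTIME_DIR earlier):

$ cat ~/.config/containers/systemd/quadlet-basic.network    # the unit source written by the role
$ systemctl --user daemon-reload                            # triggers the Quadlet generator
$ systemctl --user start quadlet-basic-network.service      # creates the podman network

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: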
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:30:15 -0500 (0:00:00.038) 0:00:57.137 ****** changed: [managed-node3] => { "changed": true, "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "313e9a2e5a99f80fa7023c19a1065658", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 105, "src": "/root/.ansible/tmp/ansible-tmp-1737217815.9402516-12676-68902585171670/.source.network", "state": "file", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:30:16 -0500 (0:00:00.731) 0:00:57.868 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:30:16 -0500 (0:00:00.031) 0:00:57.899 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:30:17 -0500 (0:00:00.684) 0:00:58.584 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-network.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service basic.target -.mount run-user-1111.mount app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", 
"ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", 
"ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3691520000", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", 
"StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.733) 0:00:59.318 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.050) 0:00:59.368 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.065) 0:00:59.433 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.061) 0:00:59.495 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.037) 0:00:59.533 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.054) 0:00:59.587 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.116) 0:00:59.704 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.035) 0:00:59.740 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.034) 0:00:59.774 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.043) 0:00:59.818 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:18 -0500 (0:00:00.424) 0:01:00.242 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003914", "end": "2025-01-18 11:30:19.324828", "rc": 0, "start": "2025-01-18 11:30:19.320914" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:19 -0500 
(0:00:00.398) 0:01:00.641 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005020", "end": "2025-01-18 11:30:19.745702", "rc": 0, "start": "2025-01-18 11:30:19.740682" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:19 -0500 (0:00:00.420) 0:01:01.061 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:19 -0500 (0:00:00.048) 0:01:01.109 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:19 -0500 (0:00:00.032) 0:01:01.141 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:19 -0500 (0:00:00.047) 0:01:01.189 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:19 -0500 (0:00:00.045) 0:01:01.235 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.049) 0:01:01.285 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.036) 0:01:01.322 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", 
"__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.061) 0:01:01.383 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.037) 0:01:01.420 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.035) 0:01:01.456 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.073) 0:01:01.529 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.038) 0:01:01.568 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.029) 0:01:01.597 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.081) 0:01:01.679 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.134) 0:01:01.813 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": null, "end": null, "rc": 0, "start": 
null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:20 -0500 (0:00:00.398) 0:01:02.211 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.061) 0:01:02.273 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.034) 0:01:02.307 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.028) 0:01:02.336 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.030) 0:01:02.367 ****** ok: [managed-node3] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 35, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.393) 0:01:02.761 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.032) 0:01:02.793 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:30:21 -0500 (0:00:00.031) 
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 18 January 2025 11:30:22 -0500 (0:00:00.741) 0:01:03.567 ******
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 18 January 2025 11:30:22 -0500 (0:00:00.670) 0:01:04.237 ******
changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-unused-network-network.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service run-user-1111.mount -.mount basic.target app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3691429888", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not 
set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", 
"TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.709) 0:01:04.946 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.042) 0:01:04.989 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.069) 0:01:05.058 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.043) 0:01:05.102 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.039) 0:01:05.141 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:30:23 -0500 (0:00:00.053) 0:01:05.194 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:24 -0500 (0:00:00.064) 0:01:05.259 ****** skipping: [managed-node3] => { "changed": 
false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:24 -0500 (0:00:00.038) 0:01:05.297 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:24 -0500 (0:00:00.035) 0:01:05.332 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:24 -0500 (0:00:00.174) 0:01:05.506 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:24 -0500 (0:00:00.391) 0:01:05.898 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003861", "end": "2025-01-18 11:30:24.975771", "rc": 0, "start": "2025-01-18 11:30:24.971910" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.392) 0:01:06.291 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005515", "end": "2025-01-18 11:30:25.398828", "rc": 0, "start": "2025-01-18 11:30:25.393313" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.421) 0:01:06.713 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { 
"user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.048) 0:01:06.761 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.032) 0:01:06.794 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.033) 0:01:06.827 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.036) 0:01:06.864 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.044) 0:01:06.909 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.045) 0:01:06.954 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.088) 0:01:07.042 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.037) 0:01:07.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.039) 0:01:07.120 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.084) 0:01:07.204 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:25 -0500 (0:00:00.040) 0:01:07.244 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:26 -0500 (0:00:00.029) 0:01:07.274 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:26 -0500 (0:00:00.065) 0:01:07.340 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:26 -0500 (0:00:00.055) 0:01:07.396 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:26 -0500 (0:00:00.396) 0:01:07.792 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark 
user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:30:26 -0500 (0:00:00.113) 0:01:07.905 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 18 January 2025 11:30:26 -0500 (0:00:00.055) 0:01:07.961 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 18 January 2025 11:30:26 -0500 (0:00:00.048) 0:01:08.009 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 18 January 2025 11:30:26 -0500 (0:00:00.046) 0:01:08.056 ******
ok: [managed-node3] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 79, "state": "directory", "uid": 1111 }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 18 January 2025 11:30:27 -0500 (0:00:00.437) 0:01:08.493 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 18 January 2025 11:30:27 -0500 (0:00:00.037) 0:01:08.531 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 18 January 2025 11:30:27 -0500 (0:00:00.054) 0:01:08.585 ******
changed: [managed-node3] => { "changed": true, "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "8682d71bf3c086f228cd72389b7c9018", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 89, "src": "/root/.ansible/tmp/ansible-tmp-1737217827.3964999-13194-44993693918340/.source.volume", "state": "file", "uid": 1111 }
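The 89-byte quadlet-basic-mysql.volume file written here is quadlet's rendering of the Volume spec from part 0 above. The recorded bytes themselves are not shown in the log, so the file_content form below is a plausible reconstruction rather than the captured file, and file_content as the literal-body field is taken from the role docs as I understand them:

    podman_quadlet_specs:
      - name: quadlet-basic-mysql
        type: volume
        # Reconstruction of the copied unit body, not the recorded content:
        file_content: |
          [Volume]
          VolumeName=quadlet-basic-mysql-name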
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 18 January 2025 11:30:28 -0500 (0:00:00.849) 0:01:09.435 ******
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 18 January 2025 11:30:28 -0500 (0:00:00.668) 0:01:10.103 ******
changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "app.slice -.mount podman-user-wait-network-online.service run-user-1111.mount basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy":
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3691384832", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", 
"OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.677) 0:01:10.781 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.033) 0:01:10.815 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.039) 0:01:10.854 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.039) 0:01:10.894 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.035) 0:01:10.930 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.072) 0:01:11.003 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.093) 0:01:11.096 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:30:29 -0500 (0:00:00.042) 0:01:11.138 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }
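This third item is the mirror image of the named volume: part 0 carries an empty Volume section, so quadlet falls back to the "systemd-" prefixed object name. Sketched as a spec entry, with the same caveat that the outer field names are assumed from the role docs:

    podman_quadlet_specs:
      - name: quadlet-basic-unused-volume
        type: volume
        # No VolumeName, so the podman volume becomes
        # systemd-quadlet-basic-unused-volume, as the ExecStart of the
        # generated service confirms further down.
        Volume: {}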
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:29 -0500 (0:00:00.043) 0:01:11.182 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:30 -0500 (0:00:00.068) 0:01:11.251 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:30 -0500 (0:00:00.420) 0:01:11.672 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004097", "end": "2025-01-18 11:30:30.748797", "rc": 0, "start": "2025-01-18 11:30:30.744700" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:30 -0500 (0:00:00.389) 0:01:12.061 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005161", "end": "2025-01-18 11:30:31.139562", "rc": 0, "start": "2025-01-18 11:30:31.134401" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.452) 0:01:12.514 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.047) 0:01:12.562 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.033) 0:01:12.595 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.032) 0:01:12.628 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.034) 0:01:12.663 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.050) 0:01:12.713 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.039) 0:01:12.753 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.053) 0:01:12.806 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.035) 0:01:12.842 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.032) 0:01:12.875 ****** ok: 
[managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.074) 0:01:12.950 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.038) 0:01:12.988 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.031) 0:01:13.020 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.064) 0:01:13.084 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:31 -0500 (0:00:00.050) 0:01:13.135 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:32 -0500 (0:00:00.387) 0:01:13.522 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:32 -0500 (0:00:00.037) 0:01:13.560 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 
January 2025 11:30:32 -0500 (0:00:00.035) 0:01:13.596 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 18 January 2025 11:30:32 -0500 (0:00:00.028) 0:01:13.624 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 18 January 2025 11:30:32 -0500 (0:00:00.030) 0:01:13.655 ******
ok: [managed-node3] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 113, "state": "directory", "uid": 1111 }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 18 January 2025 11:30:32 -0500 (0:00:00.451) 0:01:14.107 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 18 January 2025 11:30:32 -0500 (0:00:00.032) 0:01:14.139 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 18 January 2025 11:30:32 -0500 (0:00:00.032) 0:01:14.171 ******
changed: [managed-node3] => { "changed": true, "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "4967598a0284ad3e296ab106829a30a2", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 53, "src": "/root/.ansible/tmp/ansible-tmp-1737217832.9710007-13447-47959490018625/.source.volume", "state": "file", "uid": 1111 }

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 18 January 2025 11:30:33 -0500 (0:00:00.732) 0:01:14.904 ******
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }
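Because __podman_rootless is true for this item, the reload above targets the user manager, not PID 1; the generated unit lives under /run/user/1111/systemd/generator/. A sketch of an equivalent standalone task (scope and daemon_reload are standard ansible.builtin.systemd_service options; the become/environment plumbing mirrors what a rootless run needs and is an assumption about the role's internals):

    - name: Reload systemctl  # sketch of a user-scope daemon reload
      ansible.builtin.systemd_service:
        daemon_reload: true
        scope: user           # reload the per-user manager, not the system one
      become: true
      become_user: user_quadlet_basic
      environment:
        # The user runtime dir recorded in the log for UID 1111.
        XDG_RUNTIME_DIR: /run/user/1111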
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 18 January 2025 11:30:34 -0500 (0:00:00.651) 0:01:15.555 ******
changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-unused-volume-volume.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target run-user-1111.mount podman-user-wait-network-online.service app.slice -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2",
"IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3691347968", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", 
"RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:30:34 -0500 (0:00:00.683) 0:01:16.239 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.032) 0:01:16.271 ****** ok: [managed-node3] => { "ansible_facts": { 
"__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.042) 0:01:16.314 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.039) 0:01:16.353 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.035) 0:01:16.388 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.068) 0:01:16.457 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.072) 0:01:16.529 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.049) 0:01:16.579 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 
Saturday 18 January 2025 11:30:35 -0500 (0:00:00.056) 0:01:16.636 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.067) 0:01:16.703 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:35 -0500 (0:00:00.403) 0:01:17.106 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004418", "end": "2025-01-18 11:30:36.188267", "rc": 0, "start": "2025-01-18 11:30:36.183849" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.399) 0:01:17.506 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.004955", "end": "2025-01-18 11:30:36.613745", "rc": 0, "start": "2025-01-18 11:30:36.608790" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.431) 0:01:17.937 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.048) 0:01:17.986 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 
11:30:36 -0500 (0:00:00.032) 0:01:18.018 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.091) 0:01:18.110 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.033) 0:01:18.143 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.034) 0:01:18.178 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:30:36 -0500 (0:00:00.033) 0:01:18.211 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.053) 0:01:18.265 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.034) 0:01:18.299 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.033) 0:01:18.333 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", 
"__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.075) 0:01:18.408 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.045) 0:01:18.454 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.031) 0:01:18.485 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.108) 0:01:18.593 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.067) 0:01:18.661 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_basic' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.401) 0:01:19.062 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.037) 0:01:19.100 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:30:37 -0500 (0:00:00.033) 0:01:19.133 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK 
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 18 January 2025 11:30:37 -0500 (0:00:00.026) 0:01:19.160 ******
changed: [managed-node3] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }
changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 18 January 2025 11:30:45 -0500 (0:00:07.835) 0:01:26.996 ******
ok: [managed-node3] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 155, "state": "directory", "uid": 1111 }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 18 January 2025 11:30:46 -0500 (0:00:00.450) 0:01:27.446 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 18 January 2025 11:30:46 -0500 (0:00:00.036) 0:01:27.483 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 18 January 2025 11:30:46 -0500 (0:00:00.033) 0:01:27.517 ******
changed: [managed-node3] => { "changed": true, "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "1ede2d50fe62a3ca756acb50f2f6868e", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 448, "src": "/root/.ansible/tmp/ansible-tmp-1737217846.314918-13836-218565630821699/.source.container", "state": "file", "uid": 1111 }

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 18 January 2025 11:30:47 -0500 (0:00:00.793) 0:01:28.310 ******
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 18 January 2025 11:30:47 -0500 (0:00:00.676) 0:01:28.987
****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service app.slice quadlet-basic-mysql-volume.service basic.target quadlet-basic-network.service -.mount run-user-1111.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras 
--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", 
"ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3363971072", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice quadlet-basic-mysql-volume.service quadlet-basic-network.service", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", 
"SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:30:48 -0500 (0:00:01.065) 0:01:30.052 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:30:48 -0500 (0:00:00.034) 0:01:30.087 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:30:48 -0500 (0:00:00.027) 0:01:30.114 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:30:48 -0500 (0:00:00.028) 0:01:30.143 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:201 Saturday 18 January 2025 11:30:48 -0500 (0:00:00.043) 0:01:30.186 ****** ok: [managed-node3] => (item=quadlet-basic-mysql.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container" ], "delta": "0:00:00.003397", "end": "2025-01-18 11:30:49.263277", "item": "quadlet-basic-mysql.container", "rc": 0, "start": "2025-01-18 11:30:49.259880" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/linux-system-roles/mysql:5.6 ContainerName=quadlet-basic-mysql-name Volume=quadlet-basic-mysql.volume:/var/lib/mysql 
Network=quadlet-basic.network PodmanArgs=--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json Environment=FOO=/bin/busybox-extras Environment=BAZ=test ok: [managed-node3] => (item=quadlet-basic.network) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network" ], "delta": "0:00:00.003093", "end": "2025-01-18 11:30:49.633004", "item": "quadlet-basic.network", "rc": 0, "start": "2025-01-18 11:30:49.629911" } STDOUT: [Network] Subnet=192.168.29.0/24 Gateway=192.168.29.1 Label=app=wordpress NetworkName=quadlet-basic-name ok: [managed-node3] => (item=quadlet-basic-mysql.volume) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume" ], "delta": "0:00:01.004309", "end": "2025-01-18 11:30:50.983645", "item": "quadlet-basic-mysql.volume", "rc": 0, "start": "2025-01-18 11:30:49.979336" } STDOUT: # # Ansible managed # # system_role:podman [Volume] VolumeName=quadlet-basic-mysql-name TASK [Ensure linger] *********************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:211 Saturday 18 January 2025 11:30:51 -0500 (0:00:02.134) 0:01:32.321 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1737217807.401121, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1737217807.401121, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4337466, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1737217807.401121, "nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_basic", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "2120237788", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:219 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.439) 0:01:32.761 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.161) 0:01:32.923 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.084) 0:01:33.007 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.041) 0:01:33.049 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.042) 0:01:33.092 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.053) 0:01:33.145 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.048) 0:01:33.194 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:30:51 -0500 (0:00:00.039) 0:01:33.234 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:30:52 -0500 (0:00:00.077) 0:01:33.312 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to 
the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 18 January 2025 11:30:53 -0500 (0:00:01.062) 0:01:34.374 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.049) 0:01:34.424 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.054) 0:01:34.479 ******
skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.051) 0:01:34.530 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.048) 0:01:34.579 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.056) 0:01:34.635 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024778", "end": "2025-01-18 11:30:53.748446", "rc": 0, "start": "2025-01-18 11:30:53.723668" }

STDOUT:

podman version 5.3.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.428) 0:01:35.064 ******
ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.035) 0:01:35.099 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" }
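The version-gate tasks in this stretch compare the podman_version fact (5.3.1 above) against per-feature minimums using Jinja's version test, and skip because the condition is false. A hedged sketch of the same guard, reusing the exact condition string visible in the logged false_condition (the role's actual task layout may differ):

- name: Podman package version must be 4.4 or later for quadlet, secrets
  # Fails fast on hosts whose podman is too old for quadlet/secrets support;
  # with podman 5.3.1 the condition is false and the task is skipped.
  ansible.builtin.fail:
    msg: podman version {{ podman_version }} is too old for quadlet and secrets support
  when: podman_version is version("4.4", "<")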
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.034) 0:01:35.134 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 18 January 2025 11:30:53 -0500 (0:00:00.103) 0:01:35.237 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.107) 0:01:35.345 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.095) 0:01:35.441 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.099) 0:01:35.540 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.101) 0:01:35.642 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:30:54 -0500 (0:00:00.059) 0:01:35.701 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path:
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.077) 0:01:35.778 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:54 -0500 (0:00:00.134) 0:01:35.913 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.429) 0:01:36.343 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003938", "end": "2025-01-18 11:30:55.428774", "rc": 0, "start": "2025-01-18 11:30:55.424836" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.404) 0:01:36.747 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005390", "end": "2025-01-18 11:30:55.856620", "rc": 0, "start": "2025-01-18 11:30:55.851230" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:55 -0500 (0:00:00.440) 0:01:37.188 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.080) 0:01:37.269 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.056) 0:01:37.325 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.055) 0:01:37.381 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.054) 0:01:37.435 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.055) 0:01:37.491 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.039) 0:01:37.530 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_basic/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_basic/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_basic/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_basic/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.046) 0:01:37.576 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.063) 0:01:37.640 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.032) 0:01:37.673 ****** skipping: [managed-node3] => { 
"changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.031) 0:01:37.704 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.056) 0:01:37.761 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.032) 0:01:37.794 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.033) 0:01:37.827 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.114) 0:01:37.941 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.055) 0:01:37.997 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.129) 0:01:38.127 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:30:56 -0500 (0:00:00.105) 0:01:38.232 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the 
policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.053) 0:01:38.286 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.053) 0:01:38.340 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.052) 0:01:38.392 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.054) 0:01:38.446 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.050) 0:01:38.497 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.051) 0:01:38.548 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.050) 0:01:38.599 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.046) 0:01:38.645 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.044) 0:01:38.690 ****** 
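The two censored include items that follow are the secrets this test feeds to the role: mysql_container_root_password and json_secret, both of which appear later in the container's PodmanArgs. In role variables this is driven by a podman_secrets list; a minimal sketch, assuming the spec fields of the containers.podman.podman_secret module that the role wraps (the real data values are vault-encrypted, so placeholders are used here):

    podman_secrets:
      - name: mysql_container_root_password
        state: present
        # real value comes from the vaulted mysql_container_root_password variable
        data: "{{ mysql_container_root_password }}"
      - name: json_secret
        state: present
        # json_secret_data is a hypothetical variable; this secret is later
        # mounted at /tmp/test.json via PodmanArgs
        data: "{{ json_secret_data | to_json }}"

Because the play runs these for user_quadlet_basic, each secret is created as a rootless secret under that account, which is why every pass below re-checks the user, group, and XDG_RUNTIME_DIR (/run/user/1111) before the "Manage each secret" step.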
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.170) 0:01:38.860 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.054) 0:01:38.915 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.098) 0:01:39.014 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.057) 0:01:39.072 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.053) 0:01:39.126 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:57 -0500 (0:00:00.075) 0:01:39.201 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.053) 0:01:39.255 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:58 -0500 
(0:00:00.126) 0:01:39.381 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.037) 0:01:39.418 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.037) 0:01:39.455 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.036) 0:01:39.492 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.032) 0:01:39.524 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.030) 0:01:39.555 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.033) 0:01:39.588 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.031) 0:01:39.619 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.040) 0:01:39.660 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for 
managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.064) 0:01:39.724 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.053) 0:01:39.778 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.041) 0:01:39.819 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:30:58 -0500 (0:00:00.042) 0:01:39.862 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.418) 0:01:40.280 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.525) 0:01:40.805 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.039) 0:01:40.845 ****** included: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.055) 0:01:40.900 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.035) 0:01:40.936 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.035) 0:01:40.972 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.043) 0:01:41.015 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.095) 0:01:41.110 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.039) 0:01:41.149 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.032) 0:01:41.181 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.030) 0:01:41.212 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:30:59 -0500 (0:00:00.032) 0:01:41.244 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.030) 0:01:41.275 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.032) 0:01:41.307 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.031) 0:01:41.339 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.036) 0:01:41.376 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.055) 0:01:41.431 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.069) 0:01:41.501 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.041) 0:01:41.542 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.043) 0:01:41.586 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.042) 0:01:41.629 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:31:00 -0500 (0:00:00.405) 0:01:42.034 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.575) 0:01:42.610 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.029) 0:01:42.639 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log))
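Of the five quadlet items included above, the first is the mysql container spec shown in the next task result as __podman_quadlet_spec. For orientation: a spec of that shape is what the role renders into an INI-style quadlet unit at ~/.config/containers/systemd/quadlet-basic-mysql.container, roughly as follows (a sketch reconstructed from the spec and the ExecStart seen later; exact key order and Environment quoting may differ):

    [Container]
    ContainerName=quadlet-basic-mysql-name
    Environment=FOO=/bin/busybox-extras
    Environment=BAZ=test
    Image=quay.io/linux-system-roles/mysql:5.6
    Network=quadlet-basic.network
    PodmanArgs=--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json
    Volume=quadlet-basic-mysql.volume:/var/lib/mysql

    [Install]
    WantedBy=default.target

systemd's quadlet generator translates such a unit into quadlet-basic-mysql.service (note the FragmentPath under /run/user/1111/systemd/generator/ in the status dump further down, whose podman run command matches these fields). Since __podman_state is "absent" for this run, the tasks below tear the unit down rather than start it.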
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.133) 0:01:42.773 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.041) 0:01:42.814 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.096) 0:01:42.911 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.047) 0:01:42.959 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.079) 0:01:43.039 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.095) 0:01:43.134 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:01 -0500 (0:00:00.059) 0:01:43.194 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason":
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.067) 0:01:43.261 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.078) 0:01:43.340 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.439) 0:01:43.779 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004457", "end": "2025-01-18 11:31:02.881146", "rc": 0, "start": "2025-01-18 11:31:02.876689" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:02 -0500 (0:00:00.417) 0:01:44.197 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005474", "end": "2025-01-18 11:31:03.279805", "rc": 0, "start": "2025-01-18 11:31:03.274331" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.395) 0:01:44.593 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.048) 0:01:44.641 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.031) 0:01:44.673 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.032) 0:01:44.706 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.031) 0:01:44.738 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.037) 0:01:44.775 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.035) 0:01:44.811 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.064) 0:01:44.875 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.036) 0:01:44.911 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.030) 0:01:44.942 ****** ok: 
[managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.074) 0:01:45.016 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.038) 0:01:45.055 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:31:03 -0500 (0:00:00.165) 0:01:45.221 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:31:04 -0500 (0:00:00.429) 0:01:45.651 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:30:48 EST", "ActiveEnterTimestampMonotonic": "421895162", "ActiveExitTimestamp": "Sat 2025-01-18 11:30:48 EST", "ActiveExitTimestampMonotonic": "422033240", "ActiveState": "failed", "After": "podman-user-wait-network-online.service app.slice quadlet-basic-mysql-volume.service basic.target quadlet-basic-network.service -.mount run-user-1111.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:30:48 EST", "AssertTimestampMonotonic": "421514319", "Before": "default.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": 
"0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "274006000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:30:48 EST", "ConditionTimestampMonotonic": "421514314", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "6167", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:30:48 EST", "ExecMainExitTimestampMonotonic": "422032419", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "37573", "ExecMainStartTimestamp": "Sat 2025-01-18 11:30:48 EST", "ExecMainStartTimestampMonotonic": "421840001", "ExecMainStatus": "127", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[Sat 2025-01-18 11:30:48 EST] ; stop_time=[Sat 2025-01-18 11:30:48 EST] ; pid=37573 ; code=exited ; status=127 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[Sat 2025-01-18 11:30:48 EST] ; stop_time=[Sat 2025-01-18 11:30:48 EST] ; pid=37573 ; code=exited ; status=127 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i 
--cidfile=/run/user/1111/quadlet-basic-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[Sat 2025-01-18 11:30:48 EST] ; stop_time=[Sat 2025-01-18 11:30:48 EST] ; pid=37616 ; code=exited ; status=0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; flags=ignore-failure ; start_time=[Sat 2025-01-18 11:30:48 EST] ; stop_time=[Sat 2025-01-18 11:30:48 EST] ; pid=37616 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-18 11:30:48 EST", "InactiveEnterTimestampMonotonic": "422066790", "InactiveExitTimestamp": "Sat 2025-01-18 11:30:48 EST", "InactiveExitTimestampMonotonic": "421515690", "InvocationID": "d6621ee6f1c5452ebc823e991a36759e", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3362832384", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", 
"MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "38215680", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice quadlet-basic-mysql-volume.service quadlet-basic-network.service", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:48 EST", "StateChangeTimestampMonotonic": "422066790", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", 
"SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.688) 0:01:46.339 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217847.632106, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "ctime": 1737217846.9361062, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 364904699, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217846.6561062, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 448, "uid": 1111, "version": "3449294030", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.416) 0:01:46.756 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:05 -0500 (0:00:00.092) 0:01:46.849 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.406) 0:01:47.255 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 
Saturday 18 January 2025 11:31:06 -0500 (0:00:00.051) 0:01:47.307 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.031) 0:01:47.339 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.030) 0:01:47.369 ****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:06 -0500 (0:00:00.417) 0:01:47.787 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.708) 0:01:48.495 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.537) 0:01:49.032 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.050) 0:01:49.083 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:31:07 -0500 (0:00:00.036) 0:01:49.119 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.265853", "end": "2025-01-18 11:31:08.522777", "rc": 0, "start": "2025-01-18 11:31:08.256924" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:31:08 -0500 (0:00:00.720) 0:01:49.840 ******
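The linger bookkeeping in this cleanup pass (and in the secret-handling passes earlier) boils down to loginctl calls made on behalf of the rootless user; a minimal sketch of the equivalent tasks, assuming the standard linger flag path used by systemd-logind (this is not the role's literal source):

    - name: Enable linger so user units can run without a login session
      ansible.builtin.command: loginctl enable-linger user_quadlet_basic
      args:
        # systemd-logind records linger state as a flag file per user
        creates: /var/lib/systemd/linger/user_quadlet_basic

    - name: Cancel linger once nothing managed remains for the user
      ansible.builtin.command: loginctl disable-linger user_quadlet_basic
      args:
        removes: /var/lib/systemd/linger/user_quadlet_basic

Here the item state is "absent", so "Enable linger if needed" is skipped and user_quadlet_basic is merely queued in __podman_cancel_user_linger; the actual disable happens later, once the role has confirmed no quadlets, secrets, or other resources are left for that user.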
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:08 -0500 (0:00:00.053) 0:01:49.893 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:08 -0500 (0:00:00.032) 0:01:49.926 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:08 -0500 (0:00:00.030) 0:01:49.956 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:31:08 -0500 (0:00:00.091) 0:01:50.048 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.043559", "end": "2025-01-18 11:31:09.235315", "rc": 0, "start": "2025-01-18 11:31:09.191756" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:31:09 -0500 (0:00:00.504) 0:01:50.553 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.047617", "end": "2025-01-18 11:31:09.756573", "rc": 0, "start": "2025-01-18 11:31:09.708956" } STDOUT: local quadlet-basic-mysql-name local systemd-quadlet-basic-unused-volume TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:31:09 -0500 (0:00:00.521) 0:01:51.075 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.042603", "end": "2025-01-18 11:31:10.260968", "rc": 0, "start": "2025-01-18 11:31:10.218365" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:31:10 -0500 (0:00:00.503) 0:01:51.578 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.042739", "end": "2025-01-18 11:31:10.768198", "rc": 0, "start": "2025-01-18 11:31:10.725459" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - 
secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:31:10 -0500 (0:00:00.507) 0:01:52.086 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:31:11 -0500 (0:00:00.504) 0:01:52.590 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:31:11 -0500 (0:00:00.517) 0:01:53.108 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": 
"container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": 
"fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": 
"systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": 
"qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" 
}, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": 
"systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:16 -0500 (0:00:04.207) 0:01:57.315 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.033) 0:01:57.349 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.043) 0:01:57.392 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.042) 0:01:57.435 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.033) 0:01:57.468 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.050) 0:01:57.518 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.061) 0:01:57.580 ****** skipping: [managed-node3] 
=> { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.039) 0:01:57.619 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.036) 0:01:57.656 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.048) 0:01:57.704 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:16 -0500 (0:00:00.397) 0:01:58.102 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004554", "end": "2025-01-18 11:31:17.178657", "rc": 0, "start": "2025-01-18 11:31:17.174103" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.449) 0:01:58.551 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005571", "end": "2025-01-18 11:31:17.629052", "rc": 0, "start": "2025-01-18 11:31:17.623481" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.398) 0:01:58.950 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { 
"user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.050) 0:01:59.000 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.037) 0:01:59.037 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.040) 0:01:59.077 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.057) 0:01:59.135 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.063) 0:01:59.199 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:17 -0500 (0:00:00.041) 0:01:59.240 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.066) 0:01:59.307 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.044) 0:01:59.351 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.034) 0:01:59.385 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.077) 0:01:59.463 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.040) 0:01:59.503 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.094) 0:01:59.598 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:31:18 -0500 (0:00:00.423) 0:02:00.021 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-volume-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:30:34 EST", "ActiveEnterTimestampMonotonic": "408109970", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "podman-user-wait-network-online.service -.mount basic.target 
run-user-1111.mount app.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:30:34 EST", "AssertTimestampMonotonic": "408063063", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "37062000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:30:34 EST", "ConditionTimestampMonotonic": "408063058", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "5983", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:30:34 EST", "ExecMainExitTimestampMonotonic": "408109764", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:30:34 EST", "ExecMainHandoffTimestampMonotonic": "408073358", "ExecMainPID": "35876", "ExecMainStartTimestamp": "Sat 2025-01-18 11:30:34 EST", "ExecMainStartTimestampMonotonic": "408063651", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", 
"IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:30:34 EST", "InactiveExitTimestampMonotonic": "408064171", "InvocationID": "1a413e509fc945c8be6f298024a0ec6a", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3683442688", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16527360", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": 
"default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:34 EST", "StateChangeTimestampMonotonic": "408109970", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:31:19 -0500 (0:00:00.700) 0:02:00.721 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217834.1991076, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "ctime": 1737217833.589108, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 
229902, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217833.309108, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 53, "uid": 1111, "version": "1109899848", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:31:19 -0500 (0:00:00.404) 0:02:01.126 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.146) 0:02:01.273 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.380) 0:02:01.653 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.052) 0:02:01.706 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.035) 0:02:01.742 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.033) 0:02:01.776 ****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:20 -0500 (0:00:00.415) 0:02:02.191 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:21 -0500 (0:00:00.696) 0:02:02.887 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.603) 0:02:03.491 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.075) 0:02:03.566 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.058) 0:02:03.624 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.049493", "end": "2025-01-18 11:31:22.849040", "rc": 0, "start": "2025-01-18 11:31:22.799547" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.546) 0:02:04.171 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:22 -0500 (0:00:00.059) 0:02:04.230 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.036) 0:02:04.267 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.047) 0:02:04.314 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 
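
The cleanup pass logged above removes the quadlet unit file, reloads the user's systemd instance so the generated service disappears, removes the managed podman resource, and prunes unused images. A minimal standalone sketch of that same rootless cleanup follows; it is an illustration, not the role's actual task file, and it assumes the unit name (quadlet-basic-unused-volume-volume.service), user (user_quadlet_basic), and runtime directory (/run/user/1111) observed in this run:

    - name: Rootless quadlet cleanup sketch (values taken from this test run)
      hosts: managed-node3
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111   # required for user-scope systemd via become
      tasks:
        - name: Stop and disable the generated volume unit
          ansible.builtin.systemd:
            name: quadlet-basic-unused-volume-volume.service
            scope: user
            state: stopped
            enabled: false
          failed_when: false   # the unit may already be gone

        - name: Remove the quadlet file
          ansible.builtin.file:
            path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume
            state: absent

        - name: Refresh systemd so the quadlet generator drops the unit
          ansible.builtin.systemd:
            daemon_reload: true
            scope: user

        - name: Prune images no longer in use
          ansible.builtin.command: podman image prune --all -f
          changed_when: true

Driving a user-scope systemd instance through become only works when XDG_RUNTIME_DIR points at the target user's runtime directory, which is why the role stats /run/user/1111 before acting on rootless units.
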
Saturday 18 January 2025 11:31:23 -0500 (0:00:00.059) 0:02:04.374 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.046083", "end": "2025-01-18 11:31:23.565989", "rc": 0, "start": "2025-01-18 11:31:23.519906" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:31:23 -0500 (0:00:00.511) 0:02:04.885 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.044948", "end": "2025-01-18 11:31:24.074304", "rc": 0, "start": "2025-01-18 11:31:24.029356" } STDOUT: local quadlet-basic-mysql-name TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.568) 0:02:05.453 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.041731", "end": "2025-01-18 11:31:24.634728", "rc": 0, "start": "2025-01-18 11:31:24.592997" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:31:24 -0500 (0:00:00.519) 0:02:05.973 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.041858", "end": "2025-01-18 11:31:25.204107", "rc": 0, "start": "2025-01-18 11:31:25.162249" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.559) 0:02:06.532 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:31:25 -0500 (0:00:00.541) 0:02:07.074 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:31:26 -0500 (0:00:00.575) 0:02:07.650 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:29 -0500 (0:00:03.075) 0:02:10.725 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.057) 0:02:10.782 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 
Saturday 18 January 2025 11:31:29 -0500 (0:00:00.073) 0:02:10.856 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.071) 0:02:10.927 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.058) 0:02:10.985 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.082) 0:02:11.067 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.090) 0:02:11.158 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:29 -0500 (0:00:00.046) 0:02:11.204 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.046) 0:02:11.251 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.048) 0:02:11.300 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 
9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.430) 0:02:11.731 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004520", "end": "2025-01-18 11:31:30.833184", "rc": 0, "start": "2025-01-18 11:31:30.828664" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:30 -0500 (0:00:00.418) 0:02:12.149 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005117", "end": "2025-01-18 11:31:31.224653", "rc": 0, "start": "2025-01-18 11:31:31.219536" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.459) 0:02:12.608 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.051) 0:02:12.660 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.037) 0:02:12.697 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.042) 0:02:12.740 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.057) 0:02:12.797 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.046) 0:02:12.844 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.042) 0:02:12.886 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.070) 0:02:12.956 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.039) 0:02:12.996 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.034) 0:02:13.030 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.079) 0:02:13.110 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.040) 0:02:13.151 ****** included: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:31:31 -0500 (0:00:00.078) 0:02:13.229 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:31:32 -0500 (0:00:00.396) 0:02:13.626 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:30:29 EST", "ActiveEnterTimestampMonotonic": "402653875", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "podman-user-wait-network-online.service app.slice run-user-1111.mount basic.target -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:30:29 EST", "AssertTimestampMonotonic": "402606597", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "33253000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:30:29 EST", "ConditionTimestampMonotonic": "402606593", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"shutdown.target", "ControlGroupId": "5944", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:30:29 EST", "ExecMainExitTimestampMonotonic": "402653631", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:30:29 EST", "ExecMainHandoffTimestampMonotonic": "402619383", "ExecMainPID": "34608", "ExecMainStartTimestamp": "Sat 2025-01-18 11:30:29 EST", "ExecMainStartTimestampMonotonic": "402607270", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:30:29 EST", "InactiveExitTimestampMonotonic": "402607744", "InvocationID": "296604b019224163a55c4b21414ccd13", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", 
"LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3687923712", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16457728", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", 
"StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:29 EST", "StateChangeTimestampMonotonic": "402653875", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:31:33 -0500 (0:00:00.691) 0:02:14.317 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217828.7461102, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "ctime": 1737217828.1011107, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 612368614, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217827.7931108, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 89, "uid": 1111, "version": "1313473380", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:31:33 -0500 (0:00:00.399) 0:02:14.716 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:33 -0500 (0:00:00.118) 0:02:14.835 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 18 January 2025 11:31:33 -0500 (0:00:00.382) 0:02:15.217 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 18 January 2025 11:31:34 -0500 (0:00:00.053) 0:02:15.271 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 18 January 2025 11:31:34 -0500 (0:00:00.036) 0:02:15.307 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 18 January 2025 11:31:34 -0500 (0:00:00.034) 0:02:15.342 ******
changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" }

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 18 January 2025 11:31:34 -0500 (0:00:00.397) 0:02:15.740 ******
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 18 January 2025 11:31:35 -0500 (0:00:00.657) 0:02:16.397 ******
changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:31:35 -0500 (0:00:00.526) 0:02:16.924 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:31:35 -0500 (0:00:00.046) 0:02:16.971 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:31:35 -0500 (0:00:00.035) 0:02:17.006 ******
changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.043346", "end": "2025-01-18 11:31:36.190595", "rc": 0, "start": "2025-01-18 11:31:36.147249" }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.505) 0:02:17.511 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.059) 0:02:17.571 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.035) 0:02:17.607 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.049) 0:02:17.656 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.051) 0:02:17.708 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.042897", "end": "2025-01-18 11:31:36.887620", "rc": 0, "start": "2025-01-18 11:31:36.844723" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:31:36 -0500 (0:00:00.497) 0:02:18.206 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.042794", "end": "2025-01-18 11:31:37.384963", "rc": 0, "start": "2025-01-18 11:31:37.342169" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:31:37 -0500 (0:00:00.520) 0:02:18.726 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.040258", "end": "2025-01-18 11:31:37.908837", "rc": 0, "start": "2025-01-18 11:31:37.868579" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 18 January 2025 11:31:37 -0500 (0:00:00.501) 0:02:19.228 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.048979", "end": "2025-01-18 11:31:38.418289", "rc": 0, "start": "2025-01-18 11:31:38.369310" }

STDOUT:

podman
quadlet-basic-name
systemd-quadlet-basic-unused-network

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 18 January 2025 11:31:38 -0500 (0:00:00.586) 0:02:19.815 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 18 January 2025 11:31:39 -0500 (0:00:00.501) 0:02:20.316 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 18 January 2025 11:31:39 -0500 (0:00:00.525) 0:02:20.842 ******
ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd",
"state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { 
"name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", 
"source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": 
"systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", 
"source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:41 -0500 (0:00:01.997) 0:02:22.840 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:41 -0500 (0:00:00.037) 0:02:22.878 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:41 -0500 (0:00:00.063) 0:02:22.941 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:41 -0500 (0:00:00.044) 0:02:22.986 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:41 -0500 (0:00:00.037) 0:02:23.023 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:41 -0500 (0:00:00.050) 0:02:23.074 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK 
[fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:31:41 -0500 (0:00:00.062) 0:02:23.136 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:31:41 -0500 (0:00:00.039) 0:02:23.176 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025 11:31:41 -0500 (0:00:00.039) 0:02:23.215 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025 11:31:42 -0500 (0:00:00.046) 0:02:23.262 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025 11:31:42 -0500 (0:00:00.396) 0:02:23.658 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004749", "end": "2025-01-18 11:31:42.737000", "rc": 0, "start": "2025-01-18 11:31:42.732251" }

STDOUT:

0: user_quadlet_basic 589824 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 18 January 2025 11:31:42 -0500 (0:00:00.395) 0:02:24.054 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005272", "end": "2025-01-18 11:31:43.133871", "rc": 0, "start": "2025-01-18 11:31:43.128599" }

STDOUT:

0: user_quadlet_basic 589824 65536

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.398) 0:02:24.452 ******
ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.050) 0:02:24.503 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.101) 0:02:24.604 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.037) 0:02:24.642 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.036) 0:02:24.678 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.038) 0:02:24.717 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.035) 0:02:24.752 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.061) 0:02:24.814 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.038) 0:02:24.853 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.033) 0:02:24.886 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.082) 0:02:24.969 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.060) 0:02:25.029 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 18 January 2025 11:31:43 -0500 (0:00:00.079) 0:02:25.109 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 18 January 2025 11:31:44 -0500 (0:00:00.399) 0:02:25.509 ******
changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-network-network.service", "state": "stopped", "status": { "AccessSELinuxContext":
"unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:30:23 EST", "ActiveEnterTimestampMonotonic": "396813503", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target app.slice -.mount run-user-1111.mount podman-user-wait-network-online.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:30:23 EST", "AssertTimestampMonotonic": "396770508", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "35139000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:30:23 EST", "ConditionTimestampMonotonic": "396770505", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "5905", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:30:23 EST", "ExecMainExitTimestampMonotonic": "396813298", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:30:23 EST", "ExecMainHandoffTimestampMonotonic": "396784206", "ExecMainPID": "33341", "ExecMainStartTimestamp": "Sat 2025-01-18 11:30:23 EST", "ExecMainStartTimestampMonotonic": "396771081", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": 
"/run/user/1111/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:30:23 EST", "InactiveExitTimestampMonotonic": "396771575", "InvocationID": "1f4b099513034d21beadaa0f65220b24", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3687927808", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "14352384", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", 
"PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:23 EST", "StateChangeTimestampMonotonic": "396813503", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:31:44 -0500 (0:00:00.691) 0:02:26.201 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 
1737217822.8681133, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "ctime": 1737217822.2471135, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 532676826, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217821.9631135, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 54, "uid": 1111, "version": "133482598", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:31:45 -0500 (0:00:00.407) 0:02:26.609 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:45 -0500 (0:00:00.071) 0:02:26.680 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:31:45 -0500 (0:00:00.377) 0:02:27.058 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:31:45 -0500 (0:00:00.134) 0:02:27.193 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:31:45 -0500 (0:00:00.036) 0:02:27.229 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:31:46 -0500 (0:00:00.034) 0:02:27.263 ****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" }
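Editor's note: the cleanup sequence logged above amounts to three steps for a rootless quadlet: stop the generated unit, parse the quadlet source to learn which resources it owns, then delete the file. A minimal hand-rolled sketch of the same steps follows; the host facts (user, UID, paths, unit name) are taken from this run, but the task layout is illustrative and is not the role's actual task code:

- name: Stop the generated unit in the user's systemd instance
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111  # from the Stat XDG_RUNTIME_DIR task above
  ansible.builtin.systemd_service:
    name: quadlet-basic-unused-network-network.service
    state: stopped
    scope: user
  failed_when: false  # generated units cannot be disabled; the role masks failures here too

- name: Remove the quadlet source file
  ansible.builtin.file:
    path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network
    state: absent

- name: Rerun the generators so the unit definition disappears
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  ansible.builtin.systemd_service:
    daemon_reload: true
    scope: user

TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: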
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:46 -0500 (0:00:00.388) 0:02:27.652 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:31:47 -0500 (0:00:00.646) 0:02:28.298 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:31:47 -0500 (0:00:00.512) 0:02:28.811 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:47 -0500 (0:00:00.047) 0:02:28.858 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:31:47 -0500 (0:00:00.035) 0:02:28.893 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.042761", "end": "2025-01-18 11:31:48.071898", "rc": 0, "start": "2025-01-18 11:31:48.029137" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:31:48 -0500 (0:00:00.499) 0:02:29.393 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:31:48 -0500 (0:00:00.060) 0:02:29.453 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:31:48 -0500 (0:00:00.037) 0:02:29.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }
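Editor's note: because this item is being removed (__podman_item_state is absent), the enable-linger branch is skipped and the user is instead queued for a possible linger cancel once nothing of theirs remains. Underneath, linger is plain loginctl state; a sketch of both directions, assuming the standard /var/lib/systemd/linger marker files (illustrative tasks, not the role's code):

- name: Enable linger so the user's units outlive login sessions
  ansible.builtin.command: loginctl enable-linger user_quadlet_basic
  args:
    creates: /var/lib/systemd/linger/user_quadlet_basic  # makes the task idempotent

- name: Cancel linger once the user owns no podman resources
  ansible.builtin.command: loginctl disable-linger user_quadlet_basic
  args:
    removes: /var/lib/systemd/linger/user_quadlet_basic

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:31:48 -0500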
(0:00:00.034) 0:02:29.525 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:31:48 -0500 (0:00:00.042) 0:02:29.567 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.045611", "end": "2025-01-18 11:31:48.751763", "rc": 0, "start": "2025-01-18 11:31:48.706152" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:31:48 -0500 (0:00:00.503) 0:02:30.070 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.046767", "end": "2025-01-18 11:31:49.262947", "rc": 0, "start": "2025-01-18 11:31:49.216180" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:31:49 -0500 (0:00:00.512) 0:02:30.582 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.043505", "end": "2025-01-18 11:31:49.764444", "rc": 0, "start": "2025-01-18 11:31:49.720939" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:31:49 -0500 (0:00:00.501) 0:02:31.084 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.042820", "end": "2025-01-18 11:31:50.297966", "rc": 0, "start": "2025-01-18 11:31:50.255146" } STDOUT: podman quadlet-basic-name TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:31:50 -0500 (0:00:00.538) 0:02:31.623 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:31:50 -0500 (0:00:00.568) 0:02:32.191 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:31:51 -0500 (0:00:00.548) 0:02:32.740 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", 
"state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": 
"rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:31:54 -0500 (0:00:03.024) 0:02:35.765 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.033) 0:02:35.798 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": 
"[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.101) 0:02:35.900 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.043) 0:02:35.943 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.037) 0:02:35.981 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.051) 0:02:36.032 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.064) 0:02:36.097 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.039) 0:02:36.137 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:31:54 -0500 (0:00:00.041) 0:02:36.178 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:31:54 -0500 
(0:00:00.048) 0:02:36.226 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:31:55 -0500 (0:00:00.397) 0:02:36.624 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003884", "end": "2025-01-18 11:31:55.702009", "rc": 0, "start": "2025-01-18 11:31:55.698125" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:31:55 -0500 (0:00:00.398) 0:02:37.022 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.004894", "end": "2025-01-18 11:31:56.102540", "rc": 0, "start": "2025-01-18 11:31:56.097646" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.419) 0:02:37.442 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.077) 0:02:37.519 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.052) 0:02:37.572 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.108) 0:02:37.681 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.036) 0:02:37.717 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.036) 0:02:37.753 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.037) 0:02:37.791 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.081) 0:02:37.873 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.046) 0:02:37.919 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.038) 0:02:37.958 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.090) 0:02:38.048 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
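Parts 3 through 6 above pin down how the unit will be addressed: the quadlet name plus its type yields the generated unit name (quadlet-basic.network becomes quadlet-basic-network.service), rootless operation means user scope with XDG_RUNTIME_DIR=/run/user/1111, and the quadlet file itself belongs under ~/.config/containers/systemd for a user unit (the system-wide equivalent would be /etc/containers/systemd). A hedged sketch of inspecting the same unit by hand as user_quadlet_basic, using standard systemctl invocations rather than anything taken from this log:

    export XDG_RUNTIME_DIR=/run/user/1111
    ls ~/.config/containers/systemd/quadlet-basic.network
    systemctl --user status quadlet-basic-network.service

Since __podman_state is "absent" for this run, what follows is the cleanup path rather than creation.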
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.040) 0:02:38.089 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:31:56 -0500 (0:00:00.077) 0:02:38.166 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } }
TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:31:57 -0500 (0:00:00.431) 0:02:38.598 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:30:17 EST", "ActiveEnterTimestampMonotonic": "391165829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target app.slice -.mount podman-user-wait-network-online.service run-user-1111.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:30:17 EST", "AssertTimestampMonotonic": "391126911", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "31618000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf
cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:30:17 EST", "ConditionTimestampMonotonic": "391126908", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "5866", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:30:17 EST", "ExecMainExitTimestampMonotonic": "391165608", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:30:17 EST", "ExecMainHandoffTimestampMonotonic": "391137218", "ExecMainPID": "32074", "ExecMainStartTimestamp": "Sat 2025-01-18 11:30:17 EST", "ExecMainStartTimestampMonotonic": "391127474", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:30:17 EST", "InactiveExitTimestampMonotonic": "391127929", "InvocationID": "7ec85a15d39c4cb69f3bc7357e210d3a", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": 
"13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3687936000", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16506880", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/1111/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "StandardError": "inherit", "StandardInput": 
"null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:30:17 EST", "StateChangeTimestampMonotonic": "391165829", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_basic", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.715) 0:02:39.314 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217817.230116, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "ctime": 1737217816.5541162, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 452985053, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217816.2621164, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 105, "uid": 1111, "version": "781081944", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.433) 0:02:39.747 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.069) 0:02:39.816 ****** ok: 
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.069) 0:02:39.816 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:31:58 -0500 (0:00:00.382) 0:02:40.199 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:31:59 -0500 (0:00:00.051) 0:02:40.251 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:31:59 -0500 (0:00:00.092) 0:02:40.343 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false }
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:31:59 -0500 (0:00:00.035) 0:02:40.379 ****** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "state": "absent" }
TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:31:59 -0500 (0:00:00.390) 0:02:40.769 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} }
TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:00 -0500 (0:00:00.649) 0:02:41.418 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }
TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:32:00 -0500 (0:00:00.524) 0:02:41.943 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:00 -0500 (0:00:00.047) 0:02:41.990 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }
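At this point the unit is stopped, the .network file is gone, systemd has been reloaded, and the managed resource has been removed (that result is censored by no_log, but for a network quadlet the resource in question is the quadlet-basic-name network). As a hedged sketch, the equivalent manual teardown would look like:

    systemctl --user stop quadlet-basic-network.service
    rm ~/.config/containers/systemd/quadlet-basic.network
    systemctl --user daemon-reload
    podman network rm quadlet-basic-name   # assumed equivalent of the censored "Remove managed resource" task
    podman image prune --all -f            # the prune step shown next

The debug listings that follow (podman images -n, podman volume ls -n, podman ps --noheading, podman network ls -n -q) confirm that only the default podman network remains afterwards.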
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:32:00 -0500 (0:00:00.034) 0:02:42.025 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.042025", "end": "2025-01-18 11:32:01.213803", "rc": 0, "start": "2025-01-18 11:32:01.171778" }
TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:32:01 -0500 (0:00:00.510) 0:02:42.536 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3
TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:01 -0500 (0:00:00.061) 0:02:42.598 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:01 -0500 (0:00:00.038) 0:02:42.636 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:01 -0500 (0:00:00.036) 0:02:42.673 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false }
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:32:01 -0500 (0:00:00.049) 0:02:42.723 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.040712", "end": "2025-01-18 11:32:01.925874", "rc": 0, "start": "2025-01-18 11:32:01.885162" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:32:02 -0500 (0:00:00.525) 0:02:43.248 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.041734", "end": "2025-01-18 11:32:02.436988", "rc": 0, "start": "2025-01-18 11:32:02.395254" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:32:02 -0500 (0:00:00.509) 0:02:43.757 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.043171", "end": "2025-01-18 11:32:02.956065", "rc": 0, "start": "2025-01-18 11:32:02.912894" }
TASK
[fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:32:03 -0500 (0:00:00.519) 0:02:44.277 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.048269", "end": "2025-01-18 11:32:03.469035", "rc": 0, "start": "2025-01-18 11:32:03.420766" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:32:03 -0500 (0:00:00.512) 0:02:44.790 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:32:04 -0500 (0:00:00.518) 0:02:45.308 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:32:04 -0500 (0:00:00.515) 0:02:45.824 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": 
{ "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": 
"systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": 
"systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", 
"state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:06 -0500 (0:00:02.186) 0:02:48.010 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:32:06 -0500 (0:00:00.033) 0:02:48.043 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_basic) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 18 January 2025 11:32:06 -0500 (0:00:00.104) 0:02:48.148 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 18 January 2025 11:32:06 -0500 (0:00:00.040) 0:02:48.188 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 18 January 2025 11:32:06 -0500 (0:00:00.046) 0:02:48.235 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217807.4361207, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1737217848.8281062, "dev": 43, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": 
"inode/directory", "mode": "0700", "mtime": 1737217848.8281062, "nlink": 7, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 18 January 2025 11:32:07 -0500 (0:00:00.400) 0:02:48.636 ****** ok: [managed-node3] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 18 January 2025 11:32:07 -0500 (0:00:00.596) 0:02:49.232 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.050496", "end": "2025-01-18 11:32:08.424583", "rc": 0, "start": "2025-01-18 11:32:08.374087" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 18 January 2025 11:32:08 -0500 (0:00:00.513) 0:02:49.746 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.046746", "end": "2025-01-18 11:32:08.931894", "rc": 0, "start": "2025-01-18 11:32:08.885148" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 18 January 2025 11:32:09 -0500 (0:00:00.504) 0:02:50.251 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "disable-linger", "user_quadlet_basic" ], "delta": "0:00:00.008040", "end": "2025-01-18 11:32:09.339307", "rc": 0, "start": "2025-01-18 11:32:09.331267" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 18 January 2025 11:32:09 -0500 (0:00:00.447) 0:02:50.698 ****** fatal: [managed-node3]: FAILED! 
=> { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_basic" ], "delta": "0:00:00.007399", "end": "2025-01-18 11:32:09.780680", "rc": 1, "start": "2025-01-18 11:32:09.773281" } STDERR: Failed to get user: User ID 1111 is not logged in or lingering MSG: non-zero return code ...ignoring TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 18 January 2025 11:32:09 -0500 (0:00:00.410) 0:02:51.108 ****** changed: [managed-node3] => { "changed": true, "name": "systemd-logind", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:23:55 EST", "ActiveEnterTimestampMonotonic": "8397382", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "nss-user-lookup.target modprobe@drm.service system.slice basic.target sysinit.target dbus.socket systemd-remount-fs.service -.mount user.slice systemd-journald.socket tmp.mount systemd-tmpfiles-setup.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:23:55 EST", "AssertTimestampMonotonic": "8225093", "Before": "session-3.scope user-runtime-dir@0.service user@0.service multi-user.target session-5.scope shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.freedesktop.login1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "330411000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime state fdstore", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_linux_immutable cap_sys_admin cap_sys_tty_config cap_audit_control cap_mac_admin", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:23:55 EST", "ConditionTimestampMonotonic": "8225091", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/systemd-logind.service", "ControlGroupId": "2743", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "User Login Management", "DeviceAllow": "block-* r", "DevicePolicy": "auto", "Documentation": "\"man:sd-login(3)\" \"man:systemd-logind.service(8)\" \"man:logind.conf(5)\" \"man:org.freedesktop.login1(5)\"", "DropInPaths": "/usr/lib/systemd/system/systemd-logind.service.d/10-grub2-logind-service.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "SYSTEMD_REBOOT_TO_BOOT_LOADER_MENU=true", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:23:55 EST", "ExecMainHandoffTimestampMonotonic": "8369274", "ExecMainPID": "646", "ExecMainStartTimestamp": "Sat 2025-01-18 11:23:55 EST", "ExecMainStartTimestampMonotonic": "8235300", 
"ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "768", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/systemd-logind.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPAddressDeny": "0.0.0.0/0 ::/0", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-logind.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:23:55 EST", "InactiveExitTimestampMonotonic": "8241237", "InvocationID": "6214b401c1ac4a759b1c852cb610a811", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "524288", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "646", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3232718848", "MemoryCurrent": "6430720", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "7794688", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": 
"no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "3", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "systemd-logind.service dbus-org.freedesktop.login1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "yes", "PrivateTmpEx": "connected", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "strict", "ReadWritePaths": "/etc /run", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/systemd/inhibit /var/lib/systemd/linger /run/systemd/seats /run/systemd/sessions /run/systemd/shutdown /run/systemd/users", "Restart": "always", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "0", "RestartUSecNext": "0", "RestrictAddressFamilies": "AF_NETLINK AF_UNIX", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "systemd/inhibit systemd/seats systemd/sessions systemd/shutdown systemd/users", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "yes", "RuntimeDirectorySymlink": "systemd/users:", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:23:55 EST", "StateChangeTimestampMonotonic": "8397382", "StateDirectory": "systemd/linger", "StateDirectoryMode": "0755", "StateDirectorySymlink": "systemd/linger:", "StatusErrno": "0", "StatusText": "Processing requests...", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", 
"SystemCallErrorNumber": "1", "SystemCallFilter": "_llseek _newselect accept accept4 access add_key alarm arch_prctl arm_fadvise64_64 bind brk cacheflush capget capset chdir chmod chown chown32 clock_getres clock_getres_time64 clock_gettime clock_gettime64 clock_nanosleep clock_nanosleep_time64 clone clone3 close close_range connect copy_file_range creat dup dup2 dup3 epoll_create epoll_create1 epoll_ctl epoll_ctl_old epoll_pwait epoll_pwait2 epoll_wait epoll_wait_old eventfd eventfd2 execve execveat exit exit_group faccessat faccessat2 fadvise64 fadvise64_64 fallocate fchdir fchmod fchmodat fchown fchown32 fchownat fcntl fcntl64 fdatasync fgetxattr flistxattr flock fork fremovexattr fsetxattr fstat fstat64 fstatat64 fstatfs fstatfs64 fsync ftruncate ftruncate64 futex futex_time64 futimesat get_mempolicy get_robust_list get_thread_area getcpu getcwd getdents getdents64 getegid getegid32 geteuid geteuid32 getgid getgid32 getgroups getgroups32 getitimer getpeername getpgid getpgrp getpid getppid getpriority getrandom getresgid getresgid32 getresuid getresuid32 getrlimit getrusage getsid getsockname getsockopt gettid gettimeofday getuid getuid32 getxattr inotify_add_watch inotify_init inotify_init1 inotify_rm_watch io_cancel io_destroy io_getevents io_pgetevents io_pgetevents_time64 io_setup io_submit io_uring_enter io_uring_register io_uring_setup ioctl ioprio_get ioprio_set ipc kcmp keyctl kill landlock_add_rule landlock_create_ruleset landlock_restrict_self lchown lchown32 lgetxattr link linkat listen listxattr llistxattr lremovexattr lseek lsetxattr lstat lstat64 madvise mbind membarrier memfd_create migrate_pages mkdir mkdirat mknod mknodat mlock mlock2 mlockall mmap mmap2 move_pages mprotect mq_getsetattr mq_notify mq_open mq_timedreceive mq_timedreceive_time64 mq_timedsend mq_timedsend_time64 mq_unlink mremap msgctl msgget msgrcv msgsnd msync munlock munlockall munmap name_to_handle_at nanosleep newfstatat nice oldfstat oldlstat oldolduname oldstat olduname open openat openat2 pause personality pidfd_open pidfd_send_signal pipe pipe2 poll ppoll ppoll_time64 prctl pread64 preadv preadv2 prlimit64 process_madvise process_vm_readv process_vm_writev pselect6 pselect6_time64 pwrite64 pwritev pwritev2 read readahead readdir readlink readlinkat readv recv recvfrom recvmmsg recvmmsg_time64 recvmsg remap_file_pages removexattr rename renameat renameat2 request_key restart_syscall riscv_flush_icache rmdir rseq rt_sigaction rt_sigpending rt_sigprocmask rt_sigqueueinfo rt_sigreturn rt_sigsuspend rt_sigtimedwait rt_sigtimedwait_time64 rt_tgsigqueueinfo sched_get_priority_max sched_get_priority_min sched_getaffinity sched_getattr sched_getparam sched_getscheduler sched_rr_get_interval sched_rr_get_interval_time64 sched_setaffinity sched_setattr sched_setparam sched_setscheduler sched_yield seccomp select semctl semget semop semtimedop semtimedop_time64 send sendfile sendfile64 sendmmsg sendmsg sendto set_mempolicy set_robust_list set_thread_area set_tid_address set_tls setfsgid setfsgid32 setfsuid setfsuid32 setgid setgid32 setgroups setgroups32 setitimer setns setpgid setpriority setregid setregid32 setresgid setresgid32 setresuid setresuid32 setreuid setreuid32 setrlimit setsid setsockopt setuid setuid32 setxattr shmat shmctl shmdt shmget shutdown sigaction sigaltstack signal signalfd signalfd4 sigpending sigprocmask sigreturn sigsuspend socket socketcall socketpair splice stat stat64 statfs statfs64 statx swapcontext symlink symlinkat sync sync_file_range sync_file_range2 syncfs sysinfo tee 
tgkill time timer_create timer_delete timer_getoverrun timer_gettime timer_gettime64 timer_settime timer_settime64 timerfd_create timerfd_gettime timerfd_gettime64 timerfd_settime timerfd_settime64 times tkill truncate truncate64 ugetrlimit umask uname unlink unlinkat unshare userfaultfd utime utimensat utimensat_time64 utimes vfork vmsplice wait4 waitid waitpid write writev", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify-reload", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "static", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "user.slice -.mount modprobe@drm.service dbus.socket", "WantsMountsFor": "/tmp /var/tmp", "WatchdogSignal": "6", "WatchdogTimestamp": "Sat 2025-01-18 11:31:36 EST", "WatchdogTimestampMonotonic": "469374998", "WatchdogUSec": "3min" } } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 18 January 2025 11:32:10 -0500 (0:00:00.593) 0:02:51.702 ****** ok: [managed-node3] => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_basic" ], "delta": "0:00:00.074120", "end": "2025-01-18 11:32:10.848165", "failed_when_result": false, "rc": 1, "start": "2025-01-18 11:32:10.774045" } STDERR: Failed to get user: User ID 1111 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 18 January 2025 11:32:10 -0500 (0:00:00.472) 0:02:52.175 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__user_state is failed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:32:10 -0500 (0:00:00.034) 0:02:52.210 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:32:10 -0500 (0:00:00.032) 0:02:52.243 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:230 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.111) 0:02:52.355 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Ensure no linger] ******************************************************** task path: 
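
The sequence just completed is the role's linger-teardown path: it lists the user's remaining containers, networks, and secrets, cancels linger with loginctl only when nothing but the default "podman" network is left, then polls the logind user state so the play does not proceed while the session is still "closing". Stopping systemd-logind is the workaround for sessions stuck in that state; the role restarts it only if the follow-up state check fails, and an rc of 1 ("not logged in or lingering") simply means teardown already finished, hence the ...ignoring / failed_when handling above. A minimal sketch of that logic, assuming a hypothetical user name and eliding the XDG_RUNTIME_DIR environment setup the role performs:

    - name: List the user's networks (the default rootless network is named "podman")
      ansible.builtin.command: podman network ls -q
      become: true
      become_user: user_quadlet_basic
      register: __networks
      changed_when: false

    - name: Cancel linger when no user resources remain
      ansible.builtin.command: loginctl disable-linger user_quadlet_basic
      when: __networks.stdout_lines | reject('match', '^podman$') | list | length == 0

    - name: Wait for the session to leave the "closing" state
      ansible.builtin.command: loginctl show-user --value -p State user_quadlet_basic
      register: __user_state
      until: __user_state.stdout != "closing"
      retries: 3
      delay: 5
      failed_when: false  # rc=1 means the user is already gone, i.e. teardown is done
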
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:240 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.057) 0:02:52.412 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Run the role - root] ***************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:246 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.390) 0:02:52.803 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.118) 0:02:52.921 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.056) 0:02:52.978 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.042) 0:02:53.021 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.036) 0:02:53.057 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.034) 0:02:53.092 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.034) 0:02:53.126 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:32:11 -0500 (0:00:00.050) 0:02:53.177 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ 
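
On this second role invocation the ostree and transactional-update probes are all skipped: the role memoized those facts on the first run and guards each probe with "not ... is defined". A minimal sketch of that guard pattern, using the /run/ostree-booted marker the role stats:

    - name: Check if system is ostree (runs only while the fact is unset)
      ansible.builtin.stat:
        path: /run/ostree-booted
      register: __ostree_stat
      when: not __podman_is_ostree is defined

    - name: Memoize the result so later role invocations skip the probe
      ansible.builtin.set_fact:
        __podman_is_ostree: "{{ __ostree_stat.stat.exists }}"
      when: not __podman_is_ostree is defined
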
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:32:12 -0500 (0:00:00.081) 0:02:53.258 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:32:12 -0500 (0:00:00.855) 0:02:54.114 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:32:12 -0500 (0:00:00.112) 0:02:54.226 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.048) 0:02:54.275 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.036) 0:02:54.311 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.036) 0:02:54.348 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.036) 0:02:54.384 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024714", "end": "2025-01-18 11:32:13.475312", "rc": 0, "start": "2025-01-18 11:32:13.450598" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.410) 0:02:54.795 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.037) 0:02:54.833 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.035) 0:02:54.868 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.062) 0:02:54.931 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.066) 0:02:54.998 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.059) 0:02:55.058 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.060) 0:02:55.118 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:13 -0500 (0:00:00.068) 0:02:55.187 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.407) 0:02:55.594 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.041) 0:02:55.636 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.047) 0:02:55.683 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.471) 0:02:56.154 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.038) 0:02:56.192 ****** skipping: [managed-node3] => { "changed": false, 
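
The getent_passwd fact shown above is populated by ansible.builtin.getent and cached in ansible_facts, which is why the later per-secret iterations skip the lookup (the entry for root is already present). A minimal sketch of that cached lookup, with __podman_user as a stand-in:

    - name: Get user information (skipped once the fact is cached)
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"
        fail_key: false
      when: >-
        'getent_passwd' not in ansible_facts
        or __podman_user not in ansible_facts['getent_passwd']

    - name: Fail if user does not exist
      ansible.builtin.fail:
        msg: "The user {{ __podman_user }} does not exist"
      when: not ansible_facts["getent_passwd"][__podman_user]
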
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:14 -0500 (0:00:00.036) 0:02:56.229 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:56.267 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.046) 0:02:56.314 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.049) 0:02:56.363 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:56.401 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.040) 0:02:56.441 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.038) 0:02:56.479 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 
January 2025 11:32:15 -0500 (0:00:00.043) 0:02:56.523 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.062) 0:02:56.585 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.034) 0:02:56.619 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.035) 0:02:56.655 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.062) 0:02:56.718 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.035) 0:02:56.754 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.033) 0:02:56.788 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.133) 0:02:56.921 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:56.958 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.035) 0:02:56.993 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.068) 0:02:57.062 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.035) 0:02:57.098 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:57.135 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:57.172 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.035) 0:02:57.208 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:32:15 -0500 (0:00:00.037) 0:02:57.245 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.034) 0:02:57.279 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.036) 0:02:57.316 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.032) 0:02:57.348 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.051) 0:02:57.400 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.107) 0:02:57.508 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.039) 0:02:57.547 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.129) 0:02:57.677 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.040) 0:02:57.717 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.039) 0:02:57.756 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.051) 0:02:57.808 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.036) 0:02:57.844 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.035) 0:02:57.880 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.036) 0:02:57.916 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.034) 0:02:57.950 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.035) 0:02:57.986 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.036) 0:02:58.022 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.034) 0:02:58.057 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 
2025 11:32:16 -0500 (0:00:00.035) 0:02:58.093 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.035) 0:02:58.128 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.044) 0:02:58.172 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:16 -0500 (0:00:00.062) 0:02:58.234 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.034) 0:02:58.269 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.032) 0:02:58.301 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.108) 0:02:58.409 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.054) 0:02:58.463 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.527) 0:02:58.991 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group 
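
Each secret spec runs through handle_secret.yml per item: the role resolves the target user (root here, so __podman_rootless is false and XDG_RUNTIME_DIR resolves to /run/user/0) and then creates or updates the secret; the payload itself is censored because these tasks run with no_log. A roughly equivalent standalone task, sketched with the containers.podman.podman_secret module and a hypothetical name and vaulted value (the role's actual implementation is hidden above and may differ):

    - name: Manage a podman secret idempotently
      containers.podman.podman_secret:
        name: example_secret  # hypothetical; real names are censored in the log
        data: "{{ example_vaulted_value }}"
        state: present
      no_log: true
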
information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.041) 0:02:59.032 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.062) 0:02:59.095 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.040) 0:02:59.136 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.041) 0:02:59.177 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:17 -0500 (0:00:00.049) 0:02:59.227 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.033) 0:02:59.261 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.036) 0:02:59.297 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.036) 0:02:59.333 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.036) 0:02:59.370 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.034) 0:02:59.405 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.035) 0:02:59.440 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.047) 0:02:59.488 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.045) 0:02:59.533 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.036) 0:02:59.570 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.044) 0:02:59.615 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.134) 0:02:59.749 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.034) 0:02:59.784 ****** 
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.034) 0:02:59.818 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.033) 0:02:59.852 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:32:18 -0500 (0:00:00.034) 0:02:59.887 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.526) 0:03:00.413 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.031) 0:03:00.445 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.154) 0:03:00.599 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, 
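
The "Set per-container variables part 0" result above captures the network quadlet rendered from templates/quadlet-basic.network.j2. Unescaping __podman_quadlet_str, the file the role is about to install reads as follows (a reconstruction from that fact, not verbatim role output):

  [Network]
  Subnet=192.168.29.0/24
  Gateway=192.168.29.1
  Label=app=wordpress
  NetworkName=quadlet-basic-name

At 105 bytes this matches the size reported by the "Ensure quadlet file content is present" task further down, which writes it to /etc/containers/systemd/quadlet-basic.network; the systemd quadlet generator then derives the unit name quadlet-basic-network.service from that file name.
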
"changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.098) 0:03:00.698 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.042) 0:03:00.741 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.036) 0:03:00.777 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.050) 0:03:00.828 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.064) 0:03:00.892 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.039) 0:03:00.931 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.102) 0:03:01.033 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:19 -0500 (0:00:00.047) 0:03:01.081 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", 
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.403) 0:03:01.485 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.043) 0:03:01.528 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.043) 0:03:01.572 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.065) 0:03:01.637 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.036) 0:03:01.674 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.038) 0:03:01.712 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 
11:32:20 -0500 (0:00:00.038) 0:03:01.750 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.037) 0:03:01.788 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.037) 0:03:01.825 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.064) 0:03:01.890 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.040) 0:03:01.930 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.036) 0:03:01.966 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.082) 0:03:02.048 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.042) 0:03:02.091 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.032) 0:03:02.123 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:32:20 -0500 (0:00:00.071) 0:03:02.195 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.130) 0:03:02.325 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.034) 0:03:02.360 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.034) 0:03:02.395 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.035) 0:03:02.430 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.031) 0:03:02.462 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:32:21 -0500 (0:00:00.035) 0:03:02.497 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:22 -0500 (0:00:01.667) 0:03:04.164 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:22 -0500 (0:00:00.036) 0:03:04.201 ****** changed: [managed-node3] => { "changed": true, "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "dest": "/etc/containers/systemd/quadlet-basic.network", "gid": 0, "group": "root", "md5sum": "313e9a2e5a99f80fa7023c19a1065658", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 105, "src": "/root/.ansible/tmp/ansible-tmp-1737217943.0027647-16717-122948793557998/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:32:23 -0500 (0:00:00.721) 0:03:04.923 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:32:23 -0500 (0:00:00.033) 0:03:04.956 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:32:24 -0500 (0:00:00.782) 0:03:05.739 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target system.slice -.mount network-online.target sysinit.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot 
cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": 
"infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3269718016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", 
"StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.635) 0:03:06.374 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.037) 0:03:06.412 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.045) 0:03:06.457 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.042) 0:03:06.500 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.034) 0:03:06.535 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": 
"network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.049) 0:03:06.585 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.128) 0:03:06.713 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.038) 0:03:06.752 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.039) 0:03:06.791 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:25 -0500 (0:00:00.069) 0:03:06.861 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.405) 0:03:07.266 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.037) 0:03:07.303 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.040) 0:03:07.344 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.036) 0:03:07.381 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.034) 0:03:07.415 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.036) 0:03:07.452 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.037) 0:03:07.489 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.035) 0:03:07.524 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.036) 0:03:07.561 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": 
"/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.055) 0:03:07.617 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.037) 0:03:07.655 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.034) 0:03:07.690 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.080) 0:03:07.770 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.119) 0:03:07.889 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.039) 0:03:07.929 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.072) 0:03:08.002 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.058) 0:03:08.060 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task 
path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.033) 0:03:08.094 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.033) 0:03:08.128 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.034) 0:03:08.162 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.031) 0:03:08.193 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:32:26 -0500 (0:00:00.034) 0:03:08.228 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 35, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:27 -0500 (0:00:00.401) 0:03:08.629 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:27 -0500 (0:00:00.036) 0:03:08.665 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:32:27 -0500 (0:00:00.034) 0:03:08.700 ****** changed: [managed-node3] => { "changed": true, "checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "dest": "/etc/containers/systemd/quadlet-basic-unused-network.network", "gid": 0, "group": "root", "md5sum": "968d495367b59475979615e4884cbda2", "mode": "0644", "owner": "root", "secontext": 
"system_u:object_r:etc_t:s0", "size": 54, "src": "/root/.ansible/tmp/ansible-tmp-1737217947.4991572-16832-22648816520572/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:32:28 -0500 (0:00:00.738) 0:03:09.438 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:32:28 -0500 (0:00:00.754) 0:03:10.193 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-unused-network-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice -.mount network-online.target sysinit.target basic.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; 
argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3275542528", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", 
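
This second network illustrates quadlet's default naming: the spec was just {"Network": {}} with no NetworkName=, so the generated ExecStart above creates the podman network as systemd-quadlet-basic-unused-network, i.e. systemd- prefixed to the unit base name, whereas the first quadlet's NetworkName=quadlet-basic-name produced an unprefixed network. The 54-byte file written earlier presumably holds little beyond the section header (a sketch under that assumption, not the verbatim file):

  [Network]
  # no NetworkName= given -> podman names the network systemd-quadlet-basic-unused-network
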
"NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-unused-network.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task 
path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.628) 0:03:10.821 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.036) 0:03:10.858 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.043) 0:03:10.901 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.120) 0:03:11.021 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.040) 0:03:11.062 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.050) 0:03:11.113 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.065) 0:03:11.179 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:29 -0500 (0:00:00.040) 0:03:11.219 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not 
ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.040) 0:03:11.260 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.047) 0:03:11.307 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.400) 0:03:11.708 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:11.744 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:11.781 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:11.817 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.035) 0:03:11.853 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.037) 0:03:11.890 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:11.927 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:11.964 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.036) 0:03:12.001 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.075) 0:03:12.077 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.110) 0:03:12.187 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:30 -0500 (0:00:00.037) 0:03:12.225 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.080) 0:03:12.305 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.045) 0:03:12.350 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.038) 0:03:12.389 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.083) 0:03:12.472 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.062) 0:03:12.534 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.033) 0:03:12.568 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.035) 0:03:12.603 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.032) 0:03:12.636 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.031) 0:03:12.667 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.032) 0:03:12.700 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 79, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.420) 0:03:13.120 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.051) 0:03:13.172 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:32:31 -0500 (0:00:00.044) 0:03:13.216 ****** changed: [managed-node3] => { "changed": true, "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "dest": "/etc/containers/systemd/quadlet-basic-mysql.volume", "gid": 0, "group": "root", "md5sum": "8682d71bf3c086f228cd72389b7c9018", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 89, "src": "/root/.ansible/tmp/ansible-tmp-1737217952.0192504-16961-155479983655334/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:32:32 -0500 (0:00:00.752) 0:03:13.968 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:32:33 -0500 (0:00:00.755) 0:03:14.724 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target sysinit.target basic.target -.mount system.slice systemd-journald.socket", "AllowIsolate": "no", 
"AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": 
"infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3271655424", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", 
"RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.725) 0:03:15.449 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.045) 0:03:15.494 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.049) 0:03:15.544 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.045) 0:03:15.589 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.036) 0:03:15.626 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.052) 0:03:15.678 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.063) 0:03:15.742 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.040) 0:03:15.783 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.038) 0:03:15.822 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:34 -0500 (0:00:00.050) 0:03:15.872 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": 
"root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.448) 0:03:16.321 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.060) 0:03:16.382 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.060) 0:03:16.443 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.054) 0:03:16.498 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.041) 0:03:16.540 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.042) 0:03:16.582 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.042) 0:03:16.624 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.111) 0:03:16.735 ****** 
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.037) 0:03:16.772 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.061) 0:03:16.834 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.041) 0:03:16.876 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.056) 0:03:16.932 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.130) 0:03:17.063 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.068) 0:03:17.132 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:35 -0500 (0:00:00.058) 0:03:17.190 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.124) 0:03:17.315 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.109) 0:03:17.425 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.057) 0:03:17.482 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.058) 0:03:17.540 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.055) 0:03:17.596 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.053) 0:03:17.649 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.054) 0:03:17.704 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 113, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.436) 0:03:18.140 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:36 -0500 (0:00:00.059) 0:03:18.199 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.161) 0:03:18.361 ****** changed: [managed-node3] => { "changed": true, "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "dest": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "gid": 0, "group": "root", "md5sum": "4967598a0284ad3e296ab106829a30a2", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 53, "src": "/root/.ansible/tmp/ansible-tmp-1737217957.1720717-17150-132972819108597/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:32:37 -0500 (0:00:00.850) 0:03:19.212 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:32:38 -0500 (0:00:00.780) 0:03:19.993 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-unused-volume-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target systemd-journald.socket basic.target network-online.target system.slice -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", 
"ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3259850752", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", 
"SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.676) 0:03:20.669 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.045) 0:03:20.715 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.056) 0:03:20.771 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.049) 0:03:20.821 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.034) 0:03:20.855 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", 
"__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.051) 0:03:20.907 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.063) 0:03:20.970 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.040) 0:03:21.010 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.038) 0:03:21.049 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:39 -0500 (0:00:00.049) 0:03:21.098 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.410) 0:03:21.508 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.036) 0:03:21.544 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.036) 0:03:21.581 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.035) 0:03:21.616 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.127) 0:03:21.743 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.058) 0:03:21.802 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.060) 0:03:21.862 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.060) 0:03:21.923 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.057) 0:03:21.980 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", 
"__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.080) 0:03:22.061 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.042) 0:03:22.104 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.043) 0:03:22.147 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:32:40 -0500 (0:00:00.085) 0:03:22.233 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.042) 0:03:22.276 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.034) 0:03:22.311 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.072) 0:03:22.383 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.058) 0:03:22.442 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.038) 0:03:22.480 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.052) 0:03:22.533 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.039) 0:03:22.572 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 18 January 2025 11:32:41 -0500 (0:00:00.114) 0:03:22.687 ****** changed: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 18 January 2025 11:32:48 -0500 (0:00:07.277) 0:03:29.964 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 155, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 18 January 2025 11:32:49 -0500 (0:00:00.437) 0:03:30.402 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 18 January 2025 11:32:49 -0500 (0:00:00.055) 0:03:30.458 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 18 January 2025 11:32:49 -0500 (0:00:00.058) 0:03:30.516 ****** changed: [managed-node3] => {
"changed": true, "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "dest": "/etc/containers/systemd/quadlet-basic-mysql.container", "gid": 0, "group": "root", "md5sum": "1ede2d50fe62a3ca756acb50f2f6868e", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 448, "src": "/root/.ansible/tmp/ansible-tmp-1737217969.326097-17547-170001703458337/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 18 January 2025 11:32:50 -0500 (0:00:00.768) 0:03:31.285 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 18 January 2025 11:32:50 -0500 (0:00:00.761) 0:03:32.047 ****** changed: [managed-node3] => { "changed": true, "name": "quadlet-basic-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount basic.target quadlet-basic-network.service sysinit.target systemd-journald.socket quadlet-basic-mysql-volume.service network-online.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": 
"0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", 
"LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3083825152", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-basic-mysql-volume.service system.slice quadlet-basic-network.service -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 18 January 2025 11:32:51 -0500 (0:00:00.852) 0:03:32.899 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:32:51 -0500 (0:00:00.036) 0:03:32.935 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:32:51 -0500 (0:00:00.030) 0:03:32.965 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:32:51 -0500 
(0:00:00.031) 0:03:32.997 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:253 Saturday 18 January 2025 11:32:51 -0500 (0:00:00.049) 0:03:33.047 ****** ok: [managed-node3] => (item=quadlet-basic-mysql.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic-mysql.container" ], "delta": "0:00:00.003114", "end": "2025-01-18 11:32:52.130265", "item": "quadlet-basic-mysql.container", "rc": 0, "start": "2025-01-18 11:32:52.127151" }

STDOUT:

#
# Ansible managed
#
# system_role:podman

[Install]
WantedBy=default.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-basic-mysql-name
Volume=quadlet-basic-mysql.volume:/var/lib/mysql
Network=quadlet-basic.network
PodmanArgs=--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json
Environment=FOO=/bin/busybox-extras
Environment=BAZ=test

ok: [managed-node3] => (item=quadlet-basic.network) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic.network" ], "delta": "0:00:00.003272", "end": "2025-01-18 11:32:52.486836", "item": "quadlet-basic.network", "rc": 0, "start": "2025-01-18 11:32:52.483564" }

STDOUT:

[Network]
Subnet=192.168.29.0/24
Gateway=192.168.29.1
Label=app=wordpress
NetworkName=quadlet-basic-name

ok: [managed-node3] => (item=quadlet-basic-mysql.volume) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic-mysql.volume" ], "delta": "0:00:00.003172", "end": "2025-01-18 11:32:52.838942", "item": "quadlet-basic-mysql.volume", "rc": 0, "start": "2025-01-18 11:32:52.835770" }

STDOUT:

#
# Ansible managed
#
# system_role:podman

[Volume]
VolumeName=quadlet-basic-mysql-name
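The three units above are wired together by name: Volume=quadlet-basic-mysql.volume resolves to the volume called quadlet-basic-mysql-name (its VolumeName=), and Network=quadlet-basic.network resolves to the network quadlet-basic-name, matching the -v and --network arguments in the generated ExecStart shown earlier. PodmanArgs= is appended to the podman run command line verbatim, which is how the two secrets reach the container. A sketch of how to preview and approximate this by hand; the quadlet binary location varies by distribution, and the secret source file is hypothetical:

  # Print the systemd units the quadlet generator would emit from these files
  /usr/libexec/podman/quadlet -dryrun

  # Rough manual equivalent of the [Container] unit, mirroring ExecStart=
  podman secret create mysql_container_root_password /path/to/secret   # hypothetical source file
  podman run -d --rm --name quadlet-basic-mysql-name \
      --network quadlet-basic-name \
      -v quadlet-basic-mysql-name:/var/lib/mysql \
      --env FOO=/bin/busybox-extras --env BAZ=test \
      --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD \
      --secret=json_secret,type=mount,target=/tmp/test.json \
      quay.io/linux-system-roles/mysql:5.6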
=> { "changed": false, "cmd": [ "podman", "exec", "quadlet-basic-mysql-name", "cat", "/tmp/test.json" ], "delta": "0:00:00.032655", "end": "2025-01-18 11:32:53.289976", "failed_when_result": true, "rc": 125, "start": "2025-01-18 11:32:53.257321" } STDERR: Error: no container with name or ID "quadlet-basic-mysql-name" found: no such container MSG: non-zero return code TASK [Debug3] ****************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 Saturday 18 January 2025 11:32:53 -0500 (0:00:00.445) 0:03:34.624 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\n", "delta": "0:00:00.238544", "end": "2025-01-18 11:32:53.959546", "rc": 0, "start": "2025-01-18 11:32:53.721002" } STDERR: + set -o pipefail + exec + podman volume ls DRIVER VOLUME NAME local quadlet-basic-mysql-name local systemd-quadlet-basic-unused-volume + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge cb2e1cef89e6 quadlet-basic-name bridge 4fd92933ba9a systemd-quadlet-basic-unused-network bridge + podman secret ls ID NAME DRIVER CREATED UPDATED 28ce1923aa5eca89fdcb3d2c9 json_secret file 34 seconds ago 34 seconds ago 614e28ff7478b143c26026b3b mysql_container_root_password file 36 seconds ago 36 seconds ago + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + podman pod ls POD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS + podman images REPOSITORY TAG IMAGE ID CREATED SIZE quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB + systemctl list-units + grep quadlet quadlet-basic-mysql-volume.service loaded active exited quadlet-basic-mysql-volume.service ● quadlet-basic-mysql.service loaded failed failed quadlet-basic-mysql.service quadlet-basic-network.service loaded active exited quadlet-basic-network.service quadlet-basic-unused-network-network.service loaded active exited quadlet-basic-unused-network-network.service quadlet-basic-unused-volume-volume.service loaded active exited quadlet-basic-unused-volume-volume.service TASK [Check AVCs] ************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:286 Saturday 18 January 2025 11:32:54 -0500 (0:00:00.667) 0:03:35.292 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "type=AVC", "/var/log/audit/audit.log" ], "delta": "0:00:00.027935", "end": "2025-01-18 11:32:54.431761", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:32:54.403826" } STDOUT: type=AVC msg=audit(1737217680.558:604): avc: denied { read } for pid=7757 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1737217787.820:5063): avc: denied { read } for pid=27591 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0 type=AVC msg=audit(1737217795.937:5268): avc: denied { read } for pid=28447 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 
TASK [Check AVCs] ************************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:286 Saturday 18 January 2025 11:32:54 -0500 (0:00:00.667) 0:03:35.292 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "type=AVC", "/var/log/audit/audit.log" ], "delta": "0:00:00.027935", "end": "2025-01-18 11:32:54.431761", "failed_when_result": false, "rc": 0, "start": "2025-01-18 11:32:54.403826" }

STDOUT:

type=AVC msg=audit(1737217680.558:604): avc: denied { read } for pid=7757 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217787.820:5063): avc: denied { read } for pid=27591 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217795.937:5268): avc: denied { read } for pid=28447 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217798.331:5389): avc: denied { read } for pid=29027 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217848.693:7347): avc: denied { read } for pid=37575 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libtinfo.so.5.9" dev="xvda2" ino=419430899 scontext=system_u:system_r:container_t:s0:c591,c827 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0
type=AVC msg=audit(1737217848.694:7348): avc: denied { read } for pid=37575 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libdl-2.24.so" dev="xvda2" ino=419430643 scontext=system_u:system_r:container_t:s0:c591,c827 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0
type=AVC msg=audit(1737217848.694:7349): avc: denied { read } for pid=37575 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libc-2.24.so" dev="xvda2" ino=419430633 scontext=system_u:system_r:container_t:s0:c591,c827 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0
type=AVC msg=audit(1737217848.694:7350): avc: denied { read } for pid=37575 comm="docker-entrypoi" path="/lib/x86_64-linux-gnu/libc-2.24.so" dev="xvda2" ino=419430633 scontext=system_u:system_r:container_t:s0:c591,c827 tcontext=unconfined_u:object_r:data_home_t:s0 tclass=file permissive=0
type=AVC msg=audit(1737217944.276:12074): avc: denied { read } for pid=57619 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217948.756:12279): avc: denied { read } for pid=58448 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217953.289:12480): avc: denied { read } for pid=59276 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217958.558:12681): avc: denied { read } for pid=60099 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217970.593:12912): avc: denied { read } for pid=61244 comm="systemd-ssh-gen" name="vsock" dev="devtmpfs" ino=264 scontext=system_u:system_r:init_t:s0 tcontext=system_u:object_r:vsock_device_t:s0 tclass=chr_file permissive=0
type=AVC msg=audit(1737217971.531:12971): avc: denied { transition } for pid=61443 comm="3" path="/usr/local/bin/docker-entrypoint.sh" dev="overlay" ino=146800887 scontext=system_u:system_r:unconfined_service_t:s0 tcontext=system_u:system_r:container_t:s0:c285,c301 tclass=process permissive=0
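Most of these records are the recurring systemd-ssh-gen read attempt on /dev/vsock (init_t vs vsock_device_t), background noise unrelated to podman. The entries that track the failure are the container_t denials against data_home_t, where the mysql image's docker-entrypoint.sh is refused read access to its own C libraries, and the denied transition into container_t at audit time 1737217971 (11:32:51), which coincides with the service start above. A data_home_t label on container storage would explain both, so one avenue is to check where the graph root lives and how it is labeled (a sketch; the restorecon step is only appropriate if the file contexts are genuinely stale):

  podman info --format '{{.Store.GraphRoot}}'               # where image layers live
  ls -dZ "$(podman info --format '{{.Store.GraphRoot}}')"   # label on that directory
  # restorecon -Rv "$(podman info --format '{{.Store.GraphRoot}}')"  # only if mislabeled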
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.057016", "end": "2025-01-18 11:32:54.909895", "failed_when_result": true, "rc": 0, "start": "2025-01-18 11:32:54.852879" } STDOUT: Jan 18 11:27:58 managed-node3 kernel: SELinux: Converting 389 SID table entries... Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability open_perms=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability always_check_network=0 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 18 11:27:58 managed-node3 kernel: SELinux: policy capability userspace_initial_context=0 Jan 18 11:27:59 managed-node3 groupadd[7305]: group added to /etc/group: name=polkitd, GID=114 Jan 18 11:27:59 managed-node3 groupadd[7305]: group added to /etc/gshadow: name=polkitd Jan 18 11:27:59 managed-node3 groupadd[7305]: new group: name=polkitd, GID=114 Jan 18 11:27:59 managed-node3 useradd[7308]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 18 11:27:59 managed-node3 dbus-broker-launch[628]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 18 11:27:59 managed-node3 dbus-broker-launch[628]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 18 11:27:59 managed-node3 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. 
░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1232. Jan 18 11:28:00 managed-node3 systemd[1]: Started run-p7707-i8007.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p7707-i8007.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p7707-i8007.service has finished successfully. ░░ ░░ The job identifier is 1310. Jan 18 11:28:00 managed-node3 systemd[1]: Reload requested from client PID 7711 ('systemctl') (unit session-5.scope)... Jan 18 11:28:00 managed-node3 systemd[1]: Reloading... Jan 18 11:28:00 managed-node3 systemd-rc-local-generator[7755]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:28:00 managed-node3 systemd-ssh-generator[7757]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:28:00 managed-node3 (sd-exec-strv)[7730]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:28:00 managed-node3 systemd[1]: Reloading finished in 198 ms. Jan 18 11:28:00 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1388. Jan 18 11:28:00 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units… Jan 18 11:28:00 managed-node3 systemd[1]: Reloading user@0.service - User Manager for UID 0... ░░ Subject: A reload job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1466. Jan 18 11:28:00 managed-node3 systemd[4342]: Received SIGRTMIN+25 from PID 1 (systemd). Jan 18 11:28:00 managed-node3 systemd[4342]: Reexecuting. Jan 18 11:28:00 managed-node3 systemd[1]: Reloaded user@0.service - User Manager for UID 0. ░░ Subject: A reload job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has finished. ░░ ░░ The job identifier is 1466 and the job result is done. Jan 18 11:28:01 managed-node3 sudo[7196]: pam_unix(sudo:session): session closed for user root Jan 18 11:28:02 managed-node3 python3.12[8449]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:03 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 18 11:28:03 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1388.
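The systemd-ssh-generator failure just above ("Failed to query local AF_VSOCK CID: Permission denied", pid 7757) is the same event as the first vsock AVC denial in the audit log earlier, so those denials look like reload-time generator noise rather than part of the quadlet failure. One way to confirm the correlation (a sketch):

  ausearch -m AVC -ts recent -c systemd-ssh-gen | audit2why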
Jan 18 11:28:03 managed-node3 systemd[1]: run-p7707-i8007.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p7707-i8007.service has successfully entered the 'dead' state. Jan 18 11:28:03 managed-node3 python3.12[8590]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:28:03 managed-node3 python3.12[8722]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:05 managed-node3 python3.12[8855]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None Jan 18 11:28:06 managed-node3 python3.12[8986]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_aipdm3o_/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:06 managed-node3 python3.12[9118]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 18 11:28:06 managed-node3 useradd[9120]: new group: name=user1, GID=1000 Jan 18 11:28:06 managed-node3 useradd[9120]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0 Jan 18 11:28:08 managed-node3 python3.12[9382]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:09 managed-node3 python3.12[9520]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None Jan 18 11:28:09 managed-node3 python3.12[9652]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:10 managed-node3 python3.12[9785]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:10 managed-node3 python3.12[9917]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:11 managed-node3 python3.12[10049]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 mode=0755 recurse=False force=False 
follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:11 managed-node3 python3.12[10180]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:12 managed-node3 python3.12[10285]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217691.626046-7795-67089311788014/.source.conf dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=94370d6e765779f1c58daf02f667b8f0b74d91f6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:12 managed-node3 python3.12[10416]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:13 managed-node3 python3.12[10547]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:13 managed-node3 python3.12[10652]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217692.9893782-7856-194956746613415/.source.conf dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=dfb9cd7094a81b3d1bb06512cc9b49a09c75639b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:14 managed-node3 python3.12[10783]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:14 managed-node3 python3.12[10914]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:14 managed-node3 python3.12[11019]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217694.2829256-7911-4655843759591/.source.conf dest=/home/user1/.config/containers/storage.conf owner=user1 mode=0644 follow=False _original_basename=toml.j2 checksum=d08574b6a1df63dbe1c939ff0bcc7c0b61d03044 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None 
directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:15 managed-node3 python3.12[11150]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:15 managed-node3 python3.12[11281]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:16 managed-node3 python3.12[11412]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:16 managed-node3 python3.12[11517]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217695.9664414-8003-34303850585786/.source.json _original_basename=.8l8_s5hc follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:17 managed-node3 python3.12[11648]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:17 managed-node3 python3.12[11781]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:18 managed-node3 python3.12[11914]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:18 managed-node3 python3.12[12047]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:20 managed-node3 python3.12[12311]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:21 managed-node3 python3.12[12448]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:22 managed-node3 python3.12[12581]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:22 managed-node3 python3.12[12713]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:23 
managed-node3 python3.12[12845]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:23 managed-node3 python3.12[12976]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:24 managed-node3 python3.12[13042]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:24 managed-node3 python3.12[13173]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:24 managed-node3 python3.12[13304]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:25 managed-node3 python3.12[13370]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:25 managed-node3 python3.12[13501]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:26 managed-node3 python3.12[13632]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:26 managed-node3 python3.12[13698]: ansible-ansible.legacy.file Invoked with owner=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=toml.j2 recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:26 managed-node3 python3.12[13829]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:27 managed-node3 python3.12[13960]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:27 managed-node3 python3.12[14093]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json Jan 18 11:28:28 managed-node3 python3.12[14224]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:28 managed-node3 python3.12[14357]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:29 managed-node3 python3.12[14490]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:29 managed-node3 python3.12[14623]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:31 managed-node3 python3.12[14887]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:32 managed-node3 python3.12[15024]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:28:32 managed-node3 python3.12[15156]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:33 managed-node3 python3.12[15289]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:34 managed-node3 python3.12[15420]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:34 managed-node3 python3.12[15525]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217713.847467-8676-190199256584584/.source.conf dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 follow=False 
_original_basename=toml.j2 checksum=94370d6e765779f1c58daf02f667b8f0b74d91f6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:34 managed-node3 python3.12[15656]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:35 managed-node3 python3.12[15787]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:35 managed-node3 python3.12[15892]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217715.0299919-8719-130608196315287/.source.conf dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 follow=False _original_basename=toml.j2 checksum=dfb9cd7094a81b3d1bb06512cc9b49a09c75639b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:36 managed-node3 python3.12[16023]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:36 managed-node3 python3.12[16154]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:36 managed-node3 python3.12[16259]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217716.2112772-8750-226469705723449/.source.conf dest=/etc/containers/storage.conf owner=root mode=0644 follow=False _original_basename=toml.j2 checksum=d08574b6a1df63dbe1c939ff0bcc7c0b61d03044 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:37 managed-node3 python3.12[16390]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:37 managed-node3 python3.12[16521]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:38 managed-node3 python3.12[16654]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jan 18 11:28:38 managed-node3 
python3.12[16785]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:38 managed-node3 python3.12[16892]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217718.323614-8827-219753554297918/.source.json _original_basename=.62qmgimt follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:40 managed-node3 python3.12[17023]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node3 python3.12[17156]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:40 managed-node3 python3.12[17289]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:41 managed-node3 python3.12[17422]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:43 managed-node3 python3.12[17686]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:43 managed-node3 python3.12[17823]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:45 managed-node3 python3.12[17956]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:45 managed-node3 python3.12[18087]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:45 managed-node3 python3.12[18153]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:46 managed-node3 python3.12[18284]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:46 managed-node3 python3.12[18415]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:47 managed-node3 python3.12[18481]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:47 managed-node3 python3.12[18612]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:47 managed-node3 python3.12[18743]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:28:48 managed-node3 python3.12[18809]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=toml.j2 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:48 managed-node3 python3.12[18940]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:49 managed-node3 python3.12[19071]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:49 managed-node3 python3.12[19204]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jan 18 11:28:50 managed-node3 python3.12[19335]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:50 managed-node3 python3.12[19468]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:51 managed-node3 python3.12[19601]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:51 
managed-node3 python3.12[19734]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:28:52 managed-node3 python3.12[19867]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf Jan 18 11:28:52 managed-node3 python3.12[19998]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf Jan 18 11:28:53 managed-node3 python3.12[20129]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf Jan 18 11:28:53 managed-node3 python3.12[20260]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jan 18 11:28:54 managed-node3 python3.12[20391]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf Jan 18 11:28:54 managed-node3 python3.12[20522]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf Jan 18 11:28:55 managed-node3 python3.12[20653]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:55 managed-node3 python3.12[20784]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:56 managed-node3 python3.12[20915]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:56 managed-node3 python3.12[21046]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:56 managed-node3 python3.12[21177]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None 
serole=None selevel=None setype=None attributes=None Jan 18 11:28:57 managed-node3 python3.12[21308]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:57 managed-node3 python3.12[21439]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:57 managed-node3 python3.12[21570]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:28:58 managed-node3 python3.12[21701]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_aipdm3o_/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:28:58 managed-node3 python3.12[21833]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_aipdm3o_ recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:00 managed-node3 python3.12[22007]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 18 11:29:00 managed-node3 python3.12[22140]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:01 managed-node3 python3.12[22271]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:03 managed-node3 python3.12[22533]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:04 managed-node3 python3.12[22670]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:29:04 managed-node3 python3.12[22802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:07 managed-node3 python3.12[22978]: ansible-ansible.legacy.setup Invoked with 
gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 18 11:29:10 managed-node3 python3.12[23138]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:11 managed-node3 python3.12[23269]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:13 managed-node3 python3.12[23531]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:14 managed-node3 python3.12[23668]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:29:15 managed-node3 python3.12[23800]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:19 managed-node3 python3.12[23976]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 18 11:29:20 managed-node3 python3.12[24136]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:21 managed-node3 python3.12[24267]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:23 managed-node3 python3.12[24529]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:23 managed-node3 python3.12[24667]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 18 11:29:24 managed-node3 python3.12[24799]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:26 managed-node3 python3.12[24932]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:28 managed-node3 python3.12[25065]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:28 managed-node3 python3.12[25196]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:29:29 managed-node3 python3.12[25301]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217768.42563-11038-258453571987208/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None 
directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:31 managed-node3 python3.12[25563]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:32 managed-node3 python3.12[25700]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:34 managed-node3 python3.12[25833]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:36 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat3595801478-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3595801478-merged.mount has successfully entered the 'dead' state. Jan 18 11:29:36 managed-node3 kernel: evm: overlay not supported Jan 18 11:29:36 managed-node3 podman[25967]: 2025-01-18 11:29:36.558626691 -0500 EST m=+0.076193941 system refresh Jan 18 11:29:36 managed-node3 podman[25976]: 2025-01-18 11:29:36.779076745 -0500 EST m=+0.112657046 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jan 18 11:29:37 managed-node3 python3.12[26114]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:37 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
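Earlier in this journal the role writes 50-systemroles.conf drop-ins under containers.conf.d and registries.conf.d for both user1 and root; only their checksums are logged, never their contents. As a hedged sketch, a containers.conf drop-in of that kind might carry a single table and key, for example:

    [containers]
    # Hypothetical setting; the actual keys rendered from toml.j2 are not
    # recorded in this journal, only the resulting file's checksum.
    log_driver = "journald"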
Jan 18 11:29:37 managed-node3 python3.12[26245]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:29:38 managed-node3 python3.12[26350]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217777.5201657-11373-52709709939833/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:40 managed-node3 python3.12[26612]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:41 managed-node3 python3.12[26749]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:44 managed-node3 python3.12[26882]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:45 managed-node3 python3.12[27015]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:29:45 managed-node3 python3.12[27147]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:47 managed-node3 python3.12[27411]: ansible-file Invoked with path=/etc/containers/systemd/nopull.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:47 managed-node3 python3.12[27542]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:29:47 managed-node3 systemd[1]: Reload requested from client PID 27543 ('systemctl') (unit session-5.scope)... Jan 18 11:29:47 managed-node3 systemd[1]: Reloading... Jan 18 11:29:47 managed-node3 systemd-rc-local-generator[27588]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:29:47 managed-node3 quadlet-generator[27565]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 18 11:29:47 managed-node3 systemd-ssh-generator[27591]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:29:47 managed-node3 (sd-exec-strv)[27561]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:29:47 managed-node3 systemd[1]: Reloading finished in 200 ms.
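The "image pull-error ... cannot prompt without a TTY" event and the quadlet-generator warning above both stem from the unqualified image name "this_is_a_bogus_image". The journal never shows the unit files themselves, so the following is only a minimal sketch in podman-systemd.unit(5) syntax; Exec= and the [Install] section are illustrative assumptions, not the test's actual template output.

    [Unit]
    Description=Minimal quadlet container sketch, not the test's actual file

    [Container]
    # Unqualified, as in the test: short-name resolution fails without a TTY
    # and draws the quadlet-generator warning seen above. A fully qualified
    # reference such as quay.io/libpod/testimage:latest would avoid both.
    Image=this_is_a_bogus_image
    Exec=sleep inf

    [Install]
    WantedBy=default.target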
Jan 18 11:29:48 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:29:50 managed-node3 python3.12[27996]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:29:51 managed-node3 python3.12[28133]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:54 managed-node3 python3.12[28266]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:55 managed-node3 python3.12[28399]: ansible-systemd Invoked with name=bogus.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:29:55 managed-node3 systemd[1]: Reload requested from client PID 28402 ('systemctl') (unit session-5.scope)... Jan 18 11:29:55 managed-node3 systemd[1]: Reloading... Jan 18 11:29:55 managed-node3 systemd-rc-local-generator[28445]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:29:55 managed-node3 quadlet-generator[28424]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details. Jan 18 11:29:55 managed-node3 systemd-ssh-generator[28447]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:29:55 managed-node3 (sd-exec-strv)[28420]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:29:56 managed-node3 systemd[1]: Reloading finished in 199 ms. Jan 18 11:29:56 managed-node3 python3.12[28585]: ansible-stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:29:57 managed-node3 python3.12[28849]: ansible-file Invoked with path=/etc/containers/systemd/bogus.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:29:58 managed-node3 python3.12[28980]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:29:58 managed-node3 systemd[1]: Reload requested from client PID 28981 ('systemctl') (unit session-5.scope)... Jan 18 11:29:58 managed-node3 systemd[1]: Reloading... Jan 18 11:29:58 managed-node3 systemd-rc-local-generator[29024]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:29:58 managed-node3 (sd-exec-strv)[28999]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1.
Jan 18 11:29:58 managed-node3 systemd-ssh-generator[29027]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:29:58 managed-node3 systemd[1]: Reloading finished in 195 ms. Jan 18 11:29:58 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:30:00 managed-node3 python3.12[29302]: ansible-user Invoked with name=user_quadlet_basic uid=1111 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 18 11:30:00 managed-node3 useradd[29304]: new group: name=user_quadlet_basic, GID=1111 Jan 18 11:30:00 managed-node3 useradd[29304]: new user: name=user_quadlet_basic, UID=1111, GID=1111, home=/home/user_quadlet_basic, shell=/bin/bash, from=/dev/pts/0 Jan 18 11:30:03 managed-node3 python3.12[29566]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:04 managed-node3 python3.12[29704]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:06 managed-node3 python3.12[29837]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jan 18 11:30:07 managed-node3 python3.12[29969]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:07 managed-node3 systemd[1]: Created slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A start job for unit user-1111.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-1111.slice has finished successfully. ░░ ░░ The job identifier is 1468. Jan 18 11:30:07 managed-node3 systemd[1]: Starting user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A start job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1467. Jan 18 11:30:07 managed-node3 systemd[1]: Finished user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A start job for unit user-runtime-dir@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@1111.service has finished successfully. 
░░ ░░ The job identifier is 1467. Jan 18 11:30:07 managed-node3 systemd[1]: Starting user@1111.service - User Manager for UID 1111... ░░ Subject: A start job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1547. Jan 18 11:30:07 managed-node3 systemd-logind[646]: New session 6 of user user_quadlet_basic. ░░ Subject: A new session 6 has been created for user user_quadlet_basic ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user user_quadlet_basic. ░░ ░░ The leading process of the session is 29973. Jan 18 11:30:07 managed-node3 (systemd)[29973]: pam_unix(systemd-user:session): session opened for user user_quadlet_basic(uid=1111) by user_quadlet_basic(uid=0) Jan 18 11:30:07 managed-node3 systemd[29973]: Queued start job for default target default.target. Jan 18 11:30:07 managed-node3 systemd[29973]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 18 11:30:07 managed-node3 systemd[29973]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 18 11:30:07 managed-node3 systemd[29973]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 18 11:30:07 managed-node3 systemd[29973]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 18 11:30:07 managed-node3 systemd[29973]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 18 11:30:07 managed-node3 systemd[29973]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 18 11:30:07 managed-node3 systemd[29973]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 18 11:30:07 managed-node3 systemd[29973]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 18 11:30:07 managed-node3 systemd[29973]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 18 11:30:07 managed-node3 systemd[29973]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 18 11:30:07 managed-node3 systemd[29973]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 18 11:30:07 managed-node3 systemd[29973]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 18 11:30:07 managed-node3 systemd[29973]: Startup finished in 76ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 1111 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 76438 microseconds. Jan 18 11:30:07 managed-node3 systemd[1]: Started user@1111.service - User Manager for UID 1111. ░░ Subject: A start job for unit user@1111.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@1111.service has finished successfully. ░░ ░░ The job identifier is 1547. Jan 18 11:30:07 managed-node3 python3.12[30119]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:08 managed-node3 sudo[30294]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-umeisolrvvgtfxvdiqyflzwxbyuoyipk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217808.078978-12430-112243058797143/AnsiballZ_podman_secret.py' Jan 18 11:30:08 managed-node3 sudo[30294]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:08 managed-node3 systemd[29973]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 18 11:30:08 managed-node3 systemd[29973]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 13. Jan 18 11:30:08 managed-node3 dbus-broker-launch[30325]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 18 11:30:08 managed-node3 dbus-broker-launch[30325]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 18 11:30:08 managed-node3 systemd[29973]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 18 11:30:08 managed-node3 dbus-broker-launch[30325]: Ready Jan 18 11:30:08 managed-node3 systemd[29973]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 18 11:30:08 managed-node3 systemd[29973]: Started podman-30310.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 18 11:30:08 managed-node3 systemd[29973]: Started podman-pause-10d5552c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 18 11:30:08 managed-node3 systemd[29973]: Started podman-30328.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 18 11:30:08 managed-node3 systemd[29973]: Started podman-30335.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 18 11:30:08 managed-node3 sudo[30294]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:10 managed-node3 python3.12[30473]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:10 managed-node3 python3.12[30604]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:10 managed-node3 sudo[30779]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhmmhofrxjxlbteprfsvdkmtiqnbzcfg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217810.620859-12519-245394054648340/AnsiballZ_podman_secret.py' Jan 18 11:30:10 managed-node3 sudo[30779]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:11 managed-node3 systemd[29973]: Started podman-30789.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 18 11:30:11 managed-node3 systemd[29973]: Started podman-30796.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 18 11:30:11 managed-node3 systemd[29973]: Started podman-30803.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 18 11:30:12 managed-node3 sudo[30779]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:13 managed-node3 python3.12[30942]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:13 managed-node3 python3.12[31075]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:13 managed-node3 python3.12[31207]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:15 managed-node3 python3.12[31339]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:15 managed-node3 python3.12[31470]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:16 managed-node3 python3.12[31601]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:16 managed-node3 python3.12[31706]: ansible-ansible.legacy.copy Invoked with dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network owner=user_quadlet_basic group=1111 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217815.9402516-12676-68902585171670/.source.network _original_basename=.ccyx7ll5 follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:16 managed-node3 sudo[31879]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo 
BECOME-SUCCESS-ffrusbyvqzqslxnvrzndzjdxfenofemc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217816.7060785-12704-66162720041396/AnsiballZ_systemd.py' Jan 18 11:30:16 managed-node3 sudo[31879]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:17 managed-node3 python3.12[31882]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:30:17 managed-node3 systemd[29973]: Reload requested from client PID 31883 ('systemctl')... Jan 18 11:30:17 managed-node3 systemd[29973]: Reloading... Jan 18 11:30:17 managed-node3 systemd[29973]: Reloading finished in 39 ms. Jan 18 11:30:17 managed-node3 sudo[31879]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:17 managed-node3 sudo[32065]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xbogsjxighptbmojqvcfpxmizygubsfp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217817.390173-12737-95820283671063/AnsiballZ_systemd.py' Jan 18 11:30:17 managed-node3 sudo[32065]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:17 managed-node3 python3.12[32068]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:30:17 managed-node3 systemd[29973]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 59. Jan 18 11:30:17 managed-node3 sh[32072]: active Jan 18 11:30:17 managed-node3 systemd[29973]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 59. Jan 18 11:30:17 managed-node3 systemd[29973]: Starting quadlet-basic-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 47. Jan 18 11:30:17 managed-node3 quadlet-basic-network[32074]: quadlet-basic-name Jan 18 11:30:17 managed-node3 systemd[29973]: Finished quadlet-basic-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
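quadlet-basic-network.service printed "quadlet-basic-name", the name of the network it created, which is the behavior of a NetworkName= override in the .network unit; without one, quadlet would have used the default "systemd-quadlet-basic". A sketch under that assumption, with invented subnet values:

    [Network]
    # Matches the service output above; overrides the default
    # "systemd-<unit name>" network name.
    NetworkName=quadlet-basic-name
    # Illustrative values only; the test's real subnet is not in this journal.
    Subnet=192.168.29.0/24
    Gateway=192.168.29.1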
Jan 18 11:30:17 managed-node3 sudo[32065]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:18 managed-node3 python3.12[32212]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:19 managed-node3 python3.12[32345]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:19 managed-node3 python3.12[32477]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:20 managed-node3 python3.12[32609]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:21 managed-node3 python3.12[32740]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:21 managed-node3 python3.12[32871]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:22 managed-node3 python3.12[32976]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217821.6253436-12902-156234989351049/.source.network dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:22 managed-node3 sudo[33149]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ylockthfdtuenyikiltlskpyjulbgejb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217822.373105-12931-264076592435920/AnsiballZ_systemd.py' Jan 18 11:30:22 managed-node3 sudo[33149]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:22 managed-node3 python3.12[33152]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:30:22 managed-node3 systemd[29973]: Reload requested from client PID 33153 ('systemctl')... Jan 18 11:30:22 managed-node3 systemd[29973]: Reloading... Jan 18 11:30:22 managed-node3 systemd[29973]: Reloading finished in 41 ms. 
Jan 18 11:30:22 managed-node3 sudo[33149]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:23 managed-node3 sudo[33335]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-npknsjoikxsszhrqnzyppjkyzvshpakx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217823.056153-12951-172360745939526/AnsiballZ_systemd.py' Jan 18 11:30:23 managed-node3 sudo[33335]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:23 managed-node3 python3.12[33338]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:30:23 managed-node3 systemd[29973]: Starting quadlet-basic-unused-network-network.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 60. Jan 18 11:30:23 managed-node3 quadlet-basic-unused-network-network[33341]: systemd-quadlet-basic-unused-network Jan 18 11:30:23 managed-node3 systemd[29973]: Finished quadlet-basic-unused-network-network.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 60. Jan 18 11:30:23 managed-node3 sudo[33335]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:24 managed-node3 python3.12[33479]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:24 managed-node3 python3.12[33612]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:25 managed-node3 python3.12[33744]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:26 managed-node3 python3.12[33876]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:27 managed-node3 python3.12[34007]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:27 managed-node3 python3.12[34138]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 
18 11:30:28 managed-node3 python3.12[34243]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217827.3964999-13194-44993693918340/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:28 managed-node3 sudo[34416]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mdcmzxbkdaafnoocwklzwhqovhmkcslo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217828.2437115-13294-89447295939955/AnsiballZ_systemd.py' Jan 18 11:30:28 managed-node3 sudo[34416]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:28 managed-node3 python3.12[34419]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:30:28 managed-node3 systemd[29973]: Reload requested from client PID 34420 ('systemctl')... Jan 18 11:30:28 managed-node3 systemd[29973]: Reloading... Jan 18 11:30:28 managed-node3 systemd[29973]: Reloading finished in 40 ms. Jan 18 11:30:28 managed-node3 sudo[34416]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:29 managed-node3 sudo[34602]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qrddixnbceszjzdpncervnxhnrumurxf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217828.9102046-13310-221334588286355/AnsiballZ_systemd.py' Jan 18 11:30:29 managed-node3 sudo[34602]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:29 managed-node3 python3.12[34605]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:30:29 managed-node3 systemd[29973]: Starting quadlet-basic-mysql-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 73. Jan 18 11:30:29 managed-node3 quadlet-basic-mysql-volume[34608]: quadlet-basic-mysql-name Jan 18 11:30:29 managed-node3 systemd[29973]: Finished quadlet-basic-mysql-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 73. 
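The quadlet-basic-mysql-volume.service start above echoes "quadlet-basic-mysql-name" rather than quadlet's systemd-<basename> default ("systemd-quadlet-basic-mysql"), so the deployed .volume unit evidently sets VolumeName=. A sketch of such a unit (the real template payload is not visible in the journal):

    # Sketch; run as user_quadlet_basic with XDG_RUNTIME_DIR=/run/user/1111 set.
    cat > ~/.config/containers/systemd/quadlet-basic-mysql.volume <<'EOF'
    [Volume]
    VolumeName=quadlet-basic-mysql-name
    EOF
    systemctl --user daemon-reload
    systemctl --user start quadlet-basic-mysql-volume.service

By contrast, the unused-volume unit deployed next prints "systemd-quadlet-basic-unused-volume" when started, i.e. the default naming with no override.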
Jan 18 11:30:29 managed-node3 sudo[34602]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:30 managed-node3 python3.12[34747]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:30 managed-node3 python3.12[34880]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:31 managed-node3 python3.12[35012]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:32 managed-node3 python3.12[35144]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:32 managed-node3 python3.12[35275]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:33 managed-node3 python3.12[35406]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:33 managed-node3 python3.12[35511]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217832.9710007-13447-47959490018625/.source.volume dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:33 managed-node3 sudo[35684]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qkjyyaunoqstojzrmtjzlynlbdnkners ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217833.7080786-13469-240810131860909/AnsiballZ_systemd.py' Jan 18 11:30:33 managed-node3 sudo[35684]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:34 managed-node3 python3.12[35687]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:30:34 managed-node3 systemd[29973]: Reload requested from client PID 35688 ('systemctl')... Jan 18 11:30:34 managed-node3 systemd[29973]: Reloading... Jan 18 11:30:34 managed-node3 systemd[29973]: Reloading finished in 42 ms. 
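Each management step in this log repeats the same rootless preamble before touching any quadlet file: stat /usr/bin/getsubids, verify the user's subordinate ID ranges, and ensure lingering is enabled so the per-user systemd instance (PID 29973 here) survives without a login session. The equivalent commands, taken directly from the invocations above:

    getsubids user_quadlet_basic       # subuid ranges from /etc/subuid
    getsubids -g user_quadlet_basic    # subgid ranges from /etc/subgid
    # Idempotent; the role guards it with
    # creates=/var/lib/systemd/linger/user_quadlet_basic
    loginctl enable-linger user_quadlet_basic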
Jan 18 11:30:34 managed-node3 sudo[35684]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:34 managed-node3 sudo[35870]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwhccshuvddaawbhglbyohztonffyigi ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217834.3610559-13479-19889296637469/AnsiballZ_systemd.py' Jan 18 11:30:34 managed-node3 sudo[35870]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:34 managed-node3 python3.12[35873]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:30:34 managed-node3 systemd[29973]: Starting quadlet-basic-unused-volume-volume.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 86. Jan 18 11:30:34 managed-node3 quadlet-basic-unused-volume-volume[35876]: systemd-quadlet-basic-unused-volume Jan 18 11:30:34 managed-node3 systemd[29973]: Finished quadlet-basic-unused-volume-volume.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 18 11:30:34 managed-node3 sudo[35870]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:35 managed-node3 python3.12[36016]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:36 managed-node3 python3.12[36149]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:36 managed-node3 python3.12[36281]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:37 managed-node3 python3.12[36413]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 18 11:30:38 managed-node3 sudo[36586]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-niolwfuzazbmpwoauehqnbbvgtkvuwkn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217837.9692688-13607-204586416753542/AnsiballZ_podman_image.py' Jan 18 11:30:38 managed-node3 sudo[36586]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:38 managed-node3 systemd[29973]: Started podman-36590.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 99. 
Jan 18 11:30:38 managed-node3 systemd[29973]: Started podman-36597.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 103. Jan 18 11:30:45 managed-node3 systemd[29973]: podman-36597.scope: Consumed 8.615s CPU time, 483.4M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jan 18 11:30:45 managed-node3 systemd[29973]: Started podman-36772.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 18 11:30:45 managed-node3 sudo[36586]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:46 managed-node3 python3.12[36909]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:46 managed-node3 python3.12[37040]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:30:46 managed-node3 python3.12[37145]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217846.314918-13836-218565630821699/.source.container dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:30:47 managed-node3 sudo[37318]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ncuatcbgmhfklesofcjmfirikdqxrrit ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217847.1419275-13869-69380127150956/AnsiballZ_systemd.py' Jan 18 11:30:47 managed-node3 sudo[37318]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:47 managed-node3 python3.12[37321]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:30:47 managed-node3 systemd[29973]: Reload requested from client PID 37322 ('systemctl')... Jan 18 11:30:47 managed-node3 systemd[29973]: Reloading... Jan 18 11:30:47 managed-node3 systemd[29973]: Reloading finished in 43 ms. 
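podman-36597.scope consuming 8.615s of CPU with a 483.4M memory peak corresponds to the AnsiballZ_podman_image.py step: the role pre-pulls the container image inside the user's session before the .container unit is written. Roughly, under the same environment (the image reference itself is not logged here):

    # Sketch; run as user_quadlet_basic with XDG_RUNTIME_DIR=/run/user/1111 set.
    # "$IMAGE" stands in for the unlogged image reference.
    podman pull "$IMAGE"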
Jan 18 11:30:47 managed-node3 sudo[37318]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:48 managed-node3 sudo[37504]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zavwasldfwiwtbsgfmaaofrewavcjzdo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217847.796129-13886-154124132000088/AnsiballZ_systemd.py' Jan 18 11:30:48 managed-node3 sudo[37504]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:48 managed-node3 python3.12[37507]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:30:48 managed-node3 systemd[29973]: Starting quadlet-basic-mysql.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 111. Jan 18 11:30:48 managed-node3 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 18 11:30:48 managed-node3 systemd[29973]: Started rootless-netns-0922b794.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 126. Jan 18 11:30:48 managed-node3 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:48 managed-node3 kernel: veth0: entered allmulticast mode Jan 18 11:30:48 managed-node3 kernel: veth0: entered promiscuous mode Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered forwarding state Jan 18 11:30:48 managed-node3 systemd[29973]: Started run-p37567-i37867.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/1111/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 130. Jan 18 11:30:48 managed-node3 systemd[29973]: Started quadlet-basic-mysql.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 111. 
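Starting quadlet-basic-mysql.service triggers the rootless networking setup visible in the kernel messages: podman creates a per-user network namespace helper (rootless-netns-*.scope), a podman1 bridge with a veth pair, and launches aardvark-dns to serve name resolution for the user-defined network on port 53. That is the expected runtime shape of a .container unit joined to a quadlet-managed network. An inferred sketch of the unit, where every value the journal does not record is explicitly a placeholder or an assumption:

    # Sketch; placeholders below are assumptions, not logged facts.
    cat > ~/.config/containers/systemd/quadlet-basic-mysql.container <<'EOF'
    [Container]
    # Image reference is not recorded in the journal.
    Image=IMAGE_PLACEHOLDER
    # The test later cats quadlet-basic.network, so the container presumably
    # joins that quadlet-managed network (consistent with the podman1 bridge
    # and aardvark-dns startup above).
    Network=quadlet-basic.network
    # The .volume unit name is real; the mount target is an assumed example.
    Volume=quadlet-basic-mysql.volume:/var/lib/mysql
    EOF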
Jan 18 11:30:48 managed-node3 quadlet-basic-mysql[37510]: 8e7f6ff7cf333588d83a12da396caaf790b5bbacb07cdf0ecde9f528fdcdc805 Jan 18 11:30:48 managed-node3 sudo[37504]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:48 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:30:48 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:30:48 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:30:48 managed-node3 systemd[29973]: quadlet-basic-mysql.service: Main process exited, code=exited, status=127/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 127. Jan 18 11:30:48 managed-node3 systemd[29973]: quadlet-basic-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jan 18 11:30:49 managed-node3 python3.12[37734]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:49 managed-node3 python3.12[37866]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:49 managed-node3 python3.12[37998]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:51 managed-node3 python3.12[38130]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:53 managed-node3 python3.12[38394]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:55 managed-node3 python3.12[38531]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:55 managed-node3 python3.12[38664]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:55 managed-node3 python3.12[38796]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:30:58 
managed-node3 python3.12[38928]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:30:59 managed-node3 sudo[39103]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-txufmeefvivcjewwgqerwslkdtbyohqw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217859.089059-14317-50104999237463/AnsiballZ_podman_secret.py' Jan 18 11:30:59 managed-node3 sudo[39103]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:30:59 managed-node3 systemd[29973]: Started podman-39107.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 134. Jan 18 11:30:59 managed-node3 sudo[39103]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:00 managed-node3 python3.12[39244]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:01 managed-node3 sudo[39419]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ykeyfcdyyhxvdvuvkgntokspueblibch ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217860.8441157-14382-99316237083011/AnsiballZ_podman_secret.py' Jan 18 11:31:01 managed-node3 sudo[39419]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:01 managed-node3 systemd[29973]: Started podman-39423.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 138. 
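The two AnsiballZ_podman_secret.py runs above create podman secrets in the user's rootless storage (the vaulted test passwords from the play's vault variables); neither the secret names nor the values appear in the journal. The CLI shape of that operation is:

    # Sketch; run as user_quadlet_basic with XDG_RUNTIME_DIR=/run/user/1111 set.
    # "example_secret" is a stand-in name; the real names are not logged.
    printf '%s' "$SECRET_VALUE" | podman secret create example_secret -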
Jan 18 11:31:01 managed-node3 sudo[39419]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:02 managed-node3 python3.12[39561]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:02 managed-node3 python3.12[39694]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:03 managed-node3 python3.12[39826]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:04 managed-node3 python3.12[39958]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:04 managed-node3 sudo[40133]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ytimerixsuasfygdgimunsrkbejjyqyb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217864.4561236-14544-216335676738230/AnsiballZ_systemd.py' Jan 18 11:31:04 managed-node3 sudo[40133]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:04 managed-node3 python3.12[40136]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:31:04 managed-node3 systemd[29973]: Reload requested from client PID 40139 ('systemctl')... Jan 18 11:31:04 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:05 managed-node3 systemd[29973]: Reloading finished in 44 ms. 
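From here the test tears the units down again. The ansible-systemd invocation above (state=stopped enabled=False force=True) boils down to stopping the generated unit; since quadlet units are produced by a generator rather than installed unit files, the stop is the operative part:

    # Sketch; run as user_quadlet_basic with XDG_RUNTIME_DIR=/run/user/1111 set.
    systemctl --user stop quadlet-basic-mysql.service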
Jan 18 11:31:05 managed-node3 sudo[40133]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:05 managed-node3 python3.12[40280]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:06 managed-node3 python3.12[40544]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:31:06 managed-node3 sudo[40717]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bfdbkbjlnjstzgfjsdduhiporrdfxdac ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217866.601487-14629-1472345209737/AnsiballZ_systemd.py' Jan 18 11:31:06 managed-node3 sudo[40717]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:07 managed-node3 python3.12[40720]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:31:07 managed-node3 systemd[29973]: Reload requested from client PID 40721 ('systemctl')... Jan 18 11:31:07 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:07 managed-node3 systemd[29973]: Reloading finished in 42 ms. Jan 18 11:31:07 managed-node3 sudo[40717]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:07 managed-node3 sudo[40904]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xnzsbuclaatkdpkncvuuynxrrpziitir ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217867.314502-14663-257675565645950/AnsiballZ_command.py' Jan 18 11:31:07 managed-node3 sudo[40904]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:07 managed-node3 systemd[29973]: Started podman-40908.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 142. Jan 18 11:31:07 managed-node3 sudo[40904]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:08 managed-node3 sudo[41087]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hcnnielzurssyzrkhcnyyrnehsirhhso ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217867.9211812-14685-196871072491038/AnsiballZ_command.py' Jan 18 11:31:08 managed-node3 sudo[41087]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:08 managed-node3 python3.12[41090]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:08 managed-node3 systemd[29973]: Started podman-41091.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 146. Jan 18 11:31:08 managed-node3 sudo[41087]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:09 managed-node3 sudo[41270]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ufsysfowvjvxhfpfvyaifqsmvcbprmau ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217868.8519945-14706-251100404491153/AnsiballZ_command.py' Jan 18 11:31:09 managed-node3 sudo[41270]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:09 managed-node3 python3.12[41273]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:09 managed-node3 systemd[29973]: Started podman-41274.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 150. Jan 18 11:31:09 managed-node3 sudo[41270]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:09 managed-node3 sudo[41453]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdaifgneihvtvbktjzvirgynppriuuuw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217869.370208-14718-236814596261461/AnsiballZ_command.py' Jan 18 11:31:09 managed-node3 sudo[41453]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:09 managed-node3 python3.12[41456]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:09 managed-node3 systemd[29973]: Started podman-41457.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. Jan 18 11:31:09 managed-node3 sudo[41453]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:10 managed-node3 sudo[41637]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvjwvcfmqjsvevcdftxyptxppvtqqxha ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217869.878298-14733-197328738568060/AnsiballZ_command.py' Jan 18 11:31:10 managed-node3 sudo[41637]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:10 managed-node3 python3.12[41640]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:10 managed-node3 systemd[29973]: Started podman-41641.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 158. Jan 18 11:31:10 managed-node3 sudo[41637]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:10 managed-node3 sudo[41820]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kkwvljnrnvhplykxecmbxduoqupvnbmc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217870.3851564-14743-240600894088655/AnsiballZ_command.py' Jan 18 11:31:10 managed-node3 sudo[41820]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:10 managed-node3 python3.12[41823]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:10 managed-node3 systemd[29973]: Started podman-41824.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 162. Jan 18 11:31:10 managed-node3 sudo[41820]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:11 managed-node3 sudo[42003]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yzdvzqqtefcforrnhesxdfunpfitnwug ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217870.8897827-14760-22095396308566/AnsiballZ_command.py' Jan 18 11:31:11 managed-node3 sudo[42003]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:11 managed-node3 systemd[29973]: Started podman-42007.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 166. Jan 18 11:31:11 managed-node3 sudo[42003]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:11 managed-node3 sudo[42186]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zzrxiwzylhykphjemjialqcipldneuxi ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217871.3950086-14770-229633960531972/AnsiballZ_command.py' Jan 18 11:31:11 managed-node3 sudo[42186]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:11 managed-node3 systemd[29973]: Started podman-42190.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 170. 
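After each removal the role re-checks that nothing was left behind, running the same read-only audit as the rootless user; these are the exact commands visible in the scopes above:

    # Run as user_quadlet_basic with XDG_RUNTIME_DIR=/run/user/1111 set.
    podman image prune --all -f   # drop now-unused images
    podman images -n              # then confirm remaining images...
    podman volume ls -n           # ...volumes...
    podman ps --noheading         # ...containers...
    podman network ls -n -q       # ...and networks match expectations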
Jan 18 11:31:11 managed-node3 sudo[42186]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:12 managed-node3 sudo[42370]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wqlatwuxztsrkiwzdhhulvlnfnjxwjub ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217871.9111993-14787-233362266439283/AnsiballZ_service_facts.py' Jan 18 11:31:12 managed-node3 sudo[42370]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:12 managed-node3 python3.12[42373]: ansible-service_facts Invoked Jan 18 11:31:15 managed-node3 sudo[42370]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:16 managed-node3 python3.12[42613]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:17 managed-node3 python3.12[42746]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:17 managed-node3 python3.12[42878]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:18 managed-node3 python3.12[43010]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:19 managed-node3 sudo[43185]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ufxynzowlyscfseqdeeuxdyrmfbzybfz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217878.8291292-14919-192979650272076/AnsiballZ_systemd.py' Jan 18 11:31:19 managed-node3 sudo[43185]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:19 managed-node3 python3.12[43188]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:31:19 managed-node3 systemd[29973]: Reload requested from client PID 43191 ('systemctl')... Jan 18 11:31:19 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:19 managed-node3 systemd[29973]: Reloading finished in 42 ms. Jan 18 11:31:19 managed-node3 systemd[29973]: Stopped quadlet-basic-unused-volume-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. 
Jan 18 11:31:19 managed-node3 sudo[43185]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:19 managed-node3 python3.12[43332]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:20 managed-node3 python3.12[43596]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:31:21 managed-node3 sudo[43769]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kfbdboinzlndnomkwwfmswkjwbfzvofy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217881.0071807-14995-247841813636780/AnsiballZ_systemd.py' Jan 18 11:31:21 managed-node3 sudo[43769]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:21 managed-node3 python3.12[43772]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:31:21 managed-node3 systemd[29973]: Reload requested from client PID 43773 ('systemctl')... Jan 18 11:31:21 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:21 managed-node3 systemd[29973]: Reloading finished in 41 ms. Jan 18 11:31:21 managed-node3 sudo[43769]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:21 managed-node3 sudo[43956]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eiytxxwkzhiighyzmcjtkkmrvqlkrpij ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217881.7087734-15020-12485062389799/AnsiballZ_command.py' Jan 18 11:31:21 managed-node3 sudo[43956]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:22 managed-node3 systemd[29973]: Started podman-43960.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 175. Jan 18 11:31:22 managed-node3 sudo[43956]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:22 managed-node3 sudo[44140]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cepsqiywpkhxjipbwlytwypnceimyaab ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217882.4389207-15054-168186560302029/AnsiballZ_command.py' Jan 18 11:31:22 managed-node3 sudo[44140]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:22 managed-node3 python3.12[44143]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:22 managed-node3 systemd[29973]: Started podman-44144.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 179. Jan 18 11:31:22 managed-node3 sudo[44140]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:23 managed-node3 sudo[44323]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xsxgitcwacyargjwdumrleyctcmtblev ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217883.1806884-15091-260064054229672/AnsiballZ_command.py' Jan 18 11:31:23 managed-node3 sudo[44323]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:23 managed-node3 python3.12[44326]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:23 managed-node3 systemd[29973]: Started podman-44327.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 183. Jan 18 11:31:23 managed-node3 sudo[44323]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:23 managed-node3 sudo[44506]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wuhrvhfcmcvhnxlawanqkklgvhsavqxz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217883.6886153-15113-12737569554717/AnsiballZ_command.py' Jan 18 11:31:23 managed-node3 sudo[44506]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:24 managed-node3 python3.12[44509]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:24 managed-node3 systemd[29973]: Started podman-44510.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 187. Jan 18 11:31:24 managed-node3 sudo[44506]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:24 managed-node3 sudo[44689]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vxrkjdrvhrqfqbnrdrmtbqznwjmiuqbb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217884.2572355-15140-273508192601126/AnsiballZ_command.py' Jan 18 11:31:24 managed-node3 sudo[44689]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:24 managed-node3 python3.12[44692]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:24 managed-node3 systemd[29973]: Started podman-44693.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 191. Jan 18 11:31:24 managed-node3 sudo[44689]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:25 managed-node3 sudo[44872]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rpvxeuzhlqffknexfabscxpimgdbbith ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217884.790263-15152-74985812420786/AnsiballZ_command.py' Jan 18 11:31:25 managed-node3 sudo[44872]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:25 managed-node3 python3.12[44875]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:25 managed-node3 systemd[29973]: Started podman-44876.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 195. Jan 18 11:31:25 managed-node3 sudo[44872]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:25 managed-node3 sudo[45055]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-axvewalfxlehcjokjhitckuadpitkeiy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217885.339471-15181-22635682611408/AnsiballZ_command.py' Jan 18 11:31:25 managed-node3 sudo[45055]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:25 managed-node3 systemd[29973]: Started podman-45059.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 199. Jan 18 11:31:25 managed-node3 sudo[45055]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:26 managed-node3 sudo[45238]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfxavyjtcoqqcmubjzxtfeypzmflondq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217885.8910518-15202-263134519922898/AnsiballZ_command.py' Jan 18 11:31:26 managed-node3 sudo[45238]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:26 managed-node3 systemd[29973]: Started podman-45242.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 203. 
Jan 18 11:31:26 managed-node3 sudo[45238]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:26 managed-node3 sudo[45421]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kyxdapbqxustinkhblrclpmqmhkfplpt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217886.4665396-15230-204771510950430/AnsiballZ_service_facts.py' Jan 18 11:31:26 managed-node3 sudo[45421]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:26 managed-node3 python3.12[45424]: ansible-service_facts Invoked Jan 18 11:31:29 managed-node3 sudo[45421]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:30 managed-node3 python3.12[45664]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:30 managed-node3 python3.12[45797]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:31 managed-node3 python3.12[45929]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:32 managed-node3 python3.12[46061]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:32 managed-node3 sudo[46236]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iezyvwuvvaqjjnwtesaxgundqypbcxdp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217892.4317665-15438-151370407684923/AnsiballZ_systemd.py' Jan 18 11:31:32 managed-node3 sudo[46236]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:32 managed-node3 python3.12[46239]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 18 11:31:32 managed-node3 systemd[29973]: Reload requested from client PID 46242 ('systemctl')... Jan 18 11:31:32 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:32 managed-node3 systemd[29973]: Reloading finished in 41 ms. Jan 18 11:31:32 managed-node3 systemd[29973]: Stopped quadlet-basic-mysql-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 207 and the job result is done. 
Jan 18 11:31:33 managed-node3 sudo[46236]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:33 managed-node3 python3.12[46383]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:31:34 managed-node3 python3.12[46647]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:31:34 managed-node3 sudo[46820]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pbfdlssqyvalpgpnigkdztgjaybzejke ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217894.542603-15490-11564031454327/AnsiballZ_systemd.py' Jan 18 11:31:34 managed-node3 sudo[46820]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:35 managed-node3 python3.12[46823]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:31:35 managed-node3 systemd[29973]: Reload requested from client PID 46824 ('systemctl')... Jan 18 11:31:35 managed-node3 systemd[29973]: Reloading... Jan 18 11:31:35 managed-node3 systemd[29973]: Reloading finished in 41 ms. Jan 18 11:31:35 managed-node3 sudo[46820]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:35 managed-node3 sudo[47006]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ptjtyflcjlblnvdtbcgwfhzbrzzkulye ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217895.2079527-15500-217318256331841/AnsiballZ_command.py' Jan 18 11:31:35 managed-node3 sudo[47006]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:35 managed-node3 systemd[29973]: Started podman-47010.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 208. Jan 18 11:31:35 managed-node3 sudo[47006]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:35 managed-node3 sudo[47189]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vntpkomrjsnilzlvuathchlcgapwziyn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217895.8089776-15519-229275331047155/AnsiballZ_command.py' Jan 18 11:31:35 managed-node3 sudo[47189]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:36 managed-node3 python3.12[47192]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:36 managed-node3 systemd[29973]: Started podman-47193.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 212. Jan 18 11:31:36 managed-node3 sudo[47189]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:36 managed-node3 sudo[47373]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qzbhyahfgmdiyykkqxewwhcrljlkbyjy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217896.5112767-15540-85472901319357/AnsiballZ_command.py' Jan 18 11:31:36 managed-node3 sudo[47373]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:36 managed-node3 python3.12[47376]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:36 managed-node3 systemd[29973]: Started podman-47377.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 216. Jan 18 11:31:36 managed-node3 sudo[47373]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:37 managed-node3 sudo[47557]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wnmdcpcqspwazcfcjhjmnzwbobpbsgut ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217897.0095932-15550-155500357360162/AnsiballZ_command.py' Jan 18 11:31:37 managed-node3 sudo[47557]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:37 managed-node3 python3.12[47560]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:37 managed-node3 systemd[29973]: Started podman-47561.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 220. Jan 18 11:31:37 managed-node3 sudo[47557]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:37 managed-node3 sudo[47740]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dmfopnaqpjwhooezezbbnsgthelpepym ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217897.5339997-15566-9721764906252/AnsiballZ_command.py' Jan 18 11:31:37 managed-node3 sudo[47740]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:37 managed-node3 python3.12[47743]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:37 managed-node3 systemd[29973]: Started podman-47744.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 224. Jan 18 11:31:37 managed-node3 sudo[47740]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:38 managed-node3 sudo[47924]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-arcgcaugphgzfgjemnrntbzyxrkyschl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217898.032509-15577-81042579014384/AnsiballZ_command.py' Jan 18 11:31:38 managed-node3 sudo[47924]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:38 managed-node3 python3.12[47927]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:31:38 managed-node3 systemd[29973]: Started podman-47928.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 228. Jan 18 11:31:38 managed-node3 sudo[47924]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:38 managed-node3 sudo[48109]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zrnmmaivibwzbtfkmnbczhvhvjwqzkhv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217898.618975-15594-109621850643217/AnsiballZ_command.py' Jan 18 11:31:38 managed-node3 sudo[48109]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:38 managed-node3 systemd[29973]: Started podman-48113.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 232. Jan 18 11:31:39 managed-node3 sudo[48109]: pam_unix(sudo:session): session closed for user user_quadlet_basic Jan 18 11:31:39 managed-node3 sudo[48293]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gyytteyydruagjdefcmwpduzpfbmsdac ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217899.1199632-15604-276202343192042/AnsiballZ_command.py' Jan 18 11:31:39 managed-node3 sudo[48293]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Jan 18 11:31:39 managed-node3 systemd[29973]: Started podman-48297.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 236. 
Jan 18 11:31:39 managed-node3 sudo[48293]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:39 managed-node3 sudo[48477]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tnlvvrjsnuirbnrmfjscxvkucfjbrvve ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217899.6462133-15621-200112807924514/AnsiballZ_service_facts.py'
Jan 18 11:31:39 managed-node3 sudo[48477]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:39 managed-node3 python3.12[48480]: ansible-service_facts Invoked
Jan 18 11:31:41 managed-node3 sudo[48477]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:42 managed-node3 python3.12[48720]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:42 managed-node3 python3.12[48853]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:43 managed-node3 python3.12[48985]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:44 managed-node3 python3.12[49117]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:44 managed-node3 sudo[49292]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gfgrklmqkzjyuiytatxnltkrtnifkoyu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217904.3145027-15712-137763552022743/AnsiballZ_systemd.py'
Jan 18 11:31:44 managed-node3 sudo[49292]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:44 managed-node3 python3.12[49295]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jan 18 11:31:44 managed-node3 systemd[29973]: Reload requested from client PID 49298 ('systemctl')...
Jan 18 11:31:44 managed-node3 systemd[29973]: Reloading...
Jan 18 11:31:44 managed-node3 systemd[29973]: Reloading finished in 39 ms.
Jan 18 11:31:44 managed-node3 systemd[29973]: Stopped quadlet-basic-unused-network-network.service.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 240 and the job result is done.
Jan 18 11:31:44 managed-node3 sudo[49292]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:45 managed-node3 python3.12[49439]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:46 managed-node3 python3.12[49703]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:46 managed-node3 sudo[49876]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dteolcbnjxsyjsnpeuieezypbxczqmls ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217906.4543846-15771-80205442291358/AnsiballZ_systemd.py'
Jan 18 11:31:46 managed-node3 sudo[49876]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:46 managed-node3 python3.12[49879]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:31:46 managed-node3 systemd[29973]: Reload requested from client PID 49880 ('systemctl')...
Jan 18 11:31:46 managed-node3 systemd[29973]: Reloading...
Jan 18 11:31:46 managed-node3 systemd[29973]: Reloading finished in 39 ms.
Jan 18 11:31:46 managed-node3 sudo[49876]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:47 managed-node3 sudo[50062]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bisaohfypiqiayhcewvdddvyqubaeysl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217907.1082094-15788-191242659565991/AnsiballZ_command.py'
Jan 18 11:31:47 managed-node3 sudo[50062]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:47 managed-node3 systemd[29973]: Started podman-50066.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 241.
Jan 18 11:31:47 managed-node3 sudo[50062]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:47 managed-node3 sudo[50246]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-seyqprcdjwuwsvxvpemydqhuqeswupiv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217907.6960585-15800-186404247202007/AnsiballZ_command.py'
Jan 18 11:31:47 managed-node3 sudo[50246]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:48 managed-node3 python3.12[50249]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:48 managed-node3 systemd[29973]: Started podman-50250.scope.
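The entries above record the per-quadlet teardown: stop the generated quadlet-basic-unused-network-network.service in the user scope, remove the quadlet-basic-unused-network.network file from ~/.config/containers/systemd, run a user daemon-reload so the quadlet generator drops the stale unit, then prune images. A sketch of that sequence under the assumption that it runs as the rootless user with a working user manager; the helper name is illustrative, not the role's code.

import pathlib
import subprocess

def remove_quadlet_network(name: str) -> None:
    # Quadlet derives the unit name from the .network file's basename.
    unit = f"{name}-network.service"
    # Stop/disable tolerantly: a generated unit may already be gone and
    # typically has no install section, so failures are acceptable here.
    subprocess.run(["systemctl", "--user", "stop", unit], check=False)
    subprocess.run(["systemctl", "--user", "disable", unit], check=False)
    # Remove the quadlet source file...
    quadlet = pathlib.Path.home() / ".config/containers/systemd" / f"{name}.network"
    quadlet.unlink(missing_ok=True)
    # ...then reload so the generator stops producing the unit.
    subprocess.run(["systemctl", "--user", "daemon-reload"], check=True)

remove_quadlet_network("quadlet-basic-unused-network")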
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 245.
Jan 18 11:31:48 managed-node3 sudo[50246]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:48 managed-node3 sudo[50431]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ciuzjljzhjrukcmyjemzytbvlrazedai ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217908.371163-15821-21342501503431/AnsiballZ_command.py'
Jan 18 11:31:48 managed-node3 sudo[50431]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:48 managed-node3 python3.12[50434]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:48 managed-node3 systemd[29973]: Started podman-50435.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 249.
Jan 18 11:31:48 managed-node3 sudo[50431]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:49 managed-node3 sudo[50614]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yqigzgklkuydxgedfnjirectkqlfbknm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217908.8740768-15831-54456662911504/AnsiballZ_command.py'
Jan 18 11:31:49 managed-node3 sudo[50614]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:49 managed-node3 python3.12[50617]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:49 managed-node3 systemd[29973]: Started podman-50618.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 253.
Jan 18 11:31:49 managed-node3 sudo[50614]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:49 managed-node3 sudo[50798]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dambmvwbunrzfexcyeoswitswxcvhtoh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217909.3853805-15848-49300951860089/AnsiballZ_command.py'
Jan 18 11:31:49 managed-node3 sudo[50798]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:49 managed-node3 python3.12[50801]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:49 managed-node3 systemd[29973]: Started podman-50802.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 257.
Jan 18 11:31:49 managed-node3 sudo[50798]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:50 managed-node3 sudo[50981]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dzekhpjrbaaiprimbxqcwtrojemycudu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217909.8936374-15858-214575503253220/AnsiballZ_command.py'
Jan 18 11:31:50 managed-node3 sudo[50981]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:50 managed-node3 python3.12[50984]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:50 managed-node3 systemd[29973]: Started podman-50985.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 261.
Jan 18 11:31:50 managed-node3 sudo[50981]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:50 managed-node3 sudo[51164]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-astfauzbtlnyzotuyngzylmtotbexkhx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217910.4299242-15886-10840344977714/AnsiballZ_command.py'
Jan 18 11:31:50 managed-node3 sudo[51164]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:50 managed-node3 systemd[29973]: Started podman-51168.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 265.
Jan 18 11:31:50 managed-node3 sudo[51164]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:51 managed-node3 sudo[51348]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qnkwrhkglcilrzgpffjtisuwmuprmntt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217910.9977157-15908-121699951720493/AnsiballZ_command.py'
Jan 18 11:31:51 managed-node3 sudo[51348]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:51 managed-node3 systemd[29973]: Started podman-51352.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 269.
Jan 18 11:31:51 managed-node3 sudo[51348]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:51 managed-node3 sudo[51532]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-simwnchnqmiyyqolrjaeaufnlzkhhvul ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217911.544949-15935-194783824647339/AnsiballZ_service_facts.py'
Jan 18 11:31:51 managed-node3 sudo[51532]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:51 managed-node3 python3.12[51535]: ansible-service_facts Invoked
Jan 18 11:31:54 managed-node3 sudo[51532]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:55 managed-node3 python3.12[51775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:55 managed-node3 python3.12[51908]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:56 managed-node3 python3.12[52040]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:31:57 managed-node3 python3.12[52172]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:57 managed-node3 sudo[52347]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iifytpondsnynaffaucejeaaenrqrtvp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217917.4031916-16076-74879337170558/AnsiballZ_systemd.py'
Jan 18 11:31:57 managed-node3 sudo[52347]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:31:57 managed-node3 python3.12[52350]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jan 18 11:31:57 managed-node3 systemd[29973]: Reload requested from client PID 52353 ('systemctl')...
Jan 18 11:31:57 managed-node3 systemd[29973]: Reloading...
Jan 18 11:31:57 managed-node3 systemd[29973]: Reloading finished in 39 ms.
Jan 18 11:31:57 managed-node3 systemd[29973]: Stopped quadlet-basic-network.service.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 273 and the job result is done.
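Before acting on the rootless user, the role probes subordinate ID delegation with getsubids (and getsubids -g for groups), as logged above. A sketch of that check; the one-range-per-line output format ("<n>: <user> <start> <count>") is an assumption about shadow-utils' getsubids and should be verified against your version.

import subprocess

def subid_ranges(user: str, groups: bool = False) -> list[tuple[int, int]]:
    """Return (start, count) subuid/subgid ranges delegated to `user`.
    Assumes getsubids prints one range per line as '<n>: <user> <start> <count>'."""
    cmd = ["getsubids"] + (["-g"] if groups else []) + [user]
    out = subprocess.run(cmd, capture_output=True, text=True, check=True).stdout
    ranges = []
    for line in out.splitlines():
        parts = line.split()
        if len(parts) >= 4:
            ranges.append((int(parts[2]), int(parts[3])))
    return ranges

print("subuids:", subid_ranges("user_quadlet_basic"))
print("subgids:", subid_ranges("user_quadlet_basic", groups=True))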
Jan 18 11:31:58 managed-node3 sudo[52347]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:31:58 managed-node3 python3.12[52494]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:31:59 managed-node3 python3.12[52758]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 18 11:31:59 managed-node3 sudo[52931]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lzwmrbjluyntyshpwlrotynawuyzccng ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217919.5714548-16153-261441808501459/AnsiballZ_systemd.py'
Jan 18 11:31:59 managed-node3 sudo[52931]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:00 managed-node3 python3.12[52934]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jan 18 11:32:00 managed-node3 systemd[29973]: Reload requested from client PID 52935 ('systemctl')...
Jan 18 11:32:00 managed-node3 systemd[29973]: Reloading...
Jan 18 11:32:00 managed-node3 systemd[29973]: Reloading finished in 39 ms.
Jan 18 11:32:00 managed-node3 sudo[52931]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:00 managed-node3 sudo[53117]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xllyhjougtuhqplmfxcdjhyfceeohnrg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217920.2289822-16163-224965948801790/AnsiballZ_command.py'
Jan 18 11:32:00 managed-node3 sudo[53117]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:00 managed-node3 systemd[29973]: Started podman-53121.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 274.
Jan 18 11:32:00 managed-node3 sudo[53117]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:01 managed-node3 sudo[53300]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tnplzstzsdbrhgkkoqitffdibvihjrdx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217920.8281002-16182-113862647210702/AnsiballZ_command.py'
Jan 18 11:32:01 managed-node3 sudo[53300]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:01 managed-node3 python3.12[53303]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:01 managed-node3 systemd[29973]: Started podman-53304.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 278.
Jan 18 11:32:01 managed-node3 sudo[53300]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:01 managed-node3 sudo[53483]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlasokbnvuundhfxlfiqclsdtqizmpdo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217921.5415003-16200-130911081289208/AnsiballZ_command.py'
Jan 18 11:32:01 managed-node3 sudo[53483]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:01 managed-node3 python3.12[53486]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:01 managed-node3 systemd[29973]: Started podman-53487.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 282.
Jan 18 11:32:01 managed-node3 sudo[53483]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:02 managed-node3 sudo[53667]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypgscgzljkjzgespeddzbehesnerlzzb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217922.0525985-16213-196474771390276/AnsiballZ_command.py'
Jan 18 11:32:02 managed-node3 sudo[53667]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:02 managed-node3 python3.12[53670]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:02 managed-node3 systemd[29973]: Started podman-53671.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 286.
Jan 18 11:32:02 managed-node3 sudo[53667]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:02 managed-node3 sudo[53851]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tdakodnmpufrlwlfgxkxispihygmyvrg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217922.5710473-16223-167378774294699/AnsiballZ_command.py'
Jan 18 11:32:02 managed-node3 sudo[53851]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:02 managed-node3 python3.12[53854]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:02 managed-node3 systemd[29973]: Started podman-53855.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 290.
Jan 18 11:32:02 managed-node3 sudo[53851]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:03 managed-node3 sudo[54034]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccdxkacwutwbyrgbqvgbvzoyxphdjlwn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217923.0813434-16240-13486168685793/AnsiballZ_command.py'
Jan 18 11:32:03 managed-node3 sudo[54034]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:03 managed-node3 python3.12[54037]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:03 managed-node3 systemd[29973]: Started podman-54038.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 294.
Jan 18 11:32:03 managed-node3 sudo[54034]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:03 managed-node3 sudo[54217]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rpoqqbpwrnlvuoproyvufwcphodxxwyp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217923.5970502-16250-213164779590416/AnsiballZ_command.py'
Jan 18 11:32:03 managed-node3 sudo[54217]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:03 managed-node3 systemd[29973]: Started podman-54221.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 298.
Jan 18 11:32:04 managed-node3 sudo[54217]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:04 managed-node3 sudo[54400]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-skgufdbgmsgyzfntwfiboduufmvijzgk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217924.1123185-16273-67040702934455/AnsiballZ_command.py'
Jan 18 11:32:04 managed-node3 sudo[54400]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:04 managed-node3 systemd[29973]: Started podman-54404.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 302.
Jan 18 11:32:04 managed-node3 sudo[54400]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:04 managed-node3 sudo[54583]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yktyzrtuopfbqvdklsrqcmzagmbdwvoj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217924.6292768-16284-113988958836027/AnsiballZ_service_facts.py'
Jan 18 11:32:04 managed-node3 sudo[54583]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:04 managed-node3 python3.12[54586]: ansible-service_facts Invoked
Jan 18 11:32:06 managed-node3 sudo[54583]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:07 managed-node3 python3.12[54826]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:32:07 managed-node3 sudo[55001]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wjhunazuhojxixzvyyyaiaohfjmavkzd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217927.4354887-16327-197523963537724/AnsiballZ_podman_container_info.py'
Jan 18 11:32:07 managed-node3 sudo[55001]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:07 managed-node3 python3.12[55004]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Jan 18 11:32:07 managed-node3 systemd[29973]: Started podman-55005.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 306.
Jan 18 11:32:07 managed-node3 sudo[55001]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:08 managed-node3 sudo[55185]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xgumtrgzxyiwabfwgjdohxkezwjeimnu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217928.0309803-16346-240474434830180/AnsiballZ_command.py'
Jan 18 11:32:08 managed-node3 sudo[55185]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:08 managed-node3 python3.12[55188]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:08 managed-node3 systemd[29973]: Started podman-55189.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 310.
Jan 18 11:32:08 managed-node3 sudo[55185]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:08 managed-node3 sudo[55368]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-horiwqbwsvodheardslaoegzjdeydvsz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1737217928.5449963-16356-145683635018501/AnsiballZ_command.py'
Jan 18 11:32:08 managed-node3 sudo[55368]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0)
Jan 18 11:32:08 managed-node3 python3.12[55371]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:08 managed-node3 systemd[29973]: Started podman-55372.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 314.
Jan 18 11:32:08 managed-node3 sudo[55368]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Jan 18 11:32:09 managed-node3 python3.12[55509]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None
Jan 18 11:32:09 managed-node3 systemd[1]: Stopping user@1111.service - User Manager for UID 1111...
░░ Subject: A stop job for unit user@1111.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@1111.service has begun execution.
░░
░░ The job identifier is 1644.
Jan 18 11:32:09 managed-node3 systemd[29973]: Activating special unit exit.target...
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopping podman-pause-10d5552c.scope...
░░ Subject: A stop job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has begun execution.
░░
░░ The job identifier is 339.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped target default.target - Main User Target.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 327 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user..
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 328 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped target basic.target - Basic System.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 326 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped target paths.target - Paths.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 333 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped target sockets.target - Sockets.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 322 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped target timers.target - Timers.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 336 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 334 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 335 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopping dbus-broker.service - D-Bus User Message Bus...
░░ Subject: A stop job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has begun execution.
░░
░░ The job identifier is 332.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 330 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped podman-pause-10d5552c.scope.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 339 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Removed slice user.slice - Slice /user.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 338 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: user.slice: Consumed 8.843s CPU time, 483.5M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit UNIT completed and consumed the indicated resources.
Jan 18 11:32:09 managed-node3 dbus-broker[30326]: Dispatched 4332 messages @ 2(±10)μs / message.
░░ Subject: Dispatched 4332 messages
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ This message is printed by dbus-broker when shutting down. It includes metric
░░ information collected during the runtime of dbus-broker.
░░
░░ The message lists the number of dispatched messages
░░ (in this case 4332) as well as the mean time to
░░ handling a single message. The time measurements exclude the time spent on
░░ writing to and reading from the kernel.
Jan 18 11:32:09 managed-node3 systemd[29973]: Stopped dbus-broker.service - D-Bus User Message Bus.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 332 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Removed slice session.slice - User Core Session Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 337 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Closed dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 331 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Removed slice app.slice - User Application Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 329 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[29973]: Reached target shutdown.target - Shutdown.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 321.
Jan 18 11:32:09 managed-node3 systemd[29973]: Finished systemd-exit.service - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 319.
Jan 18 11:32:09 managed-node3 systemd[29973]: Reached target exit.target - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 318.
Jan 18 11:32:09 managed-node3 systemd[1]: user@1111.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@1111.service has successfully entered the 'dead' state.
Jan 18 11:32:09 managed-node3 systemd[1]: Stopped user@1111.service - User Manager for UID 1111.
░░ Subject: A stop job for unit user@1111.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@1111.service has finished.
░░
░░ The job identifier is 1644 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[1]: user@1111.service: Consumed 10.541s CPU time, 489.8M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@1111.service completed and consumed the indicated resources.
Jan 18 11:32:09 managed-node3 systemd[1]: Stopping user-runtime-dir@1111.service - User Runtime Directory /run/user/1111...
░░ Subject: A stop job for unit user-runtime-dir@1111.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@1111.service has begun execution.
░░
░░ The job identifier is 1643.
Jan 18 11:32:09 managed-node3 systemd[1]: run-user-1111.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-user-1111.mount has successfully entered the 'dead' state.
Jan 18 11:32:09 managed-node3 systemd[1]: user-runtime-dir@1111.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-runtime-dir@1111.service has successfully entered the 'dead' state.
Jan 18 11:32:09 managed-node3 systemd[1]: Stopped user-runtime-dir@1111.service - User Runtime Directory /run/user/1111.
░░ Subject: A stop job for unit user-runtime-dir@1111.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@1111.service has finished.
░░
░░ The job identifier is 1643 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[4342]: Created slice background.slice - User Background Tasks Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Jan 18 11:32:09 managed-node3 systemd[4342]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Jan 18 11:32:09 managed-node3 systemd-logind[646]: Removed session 6.
░░ Subject: Session 6 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A session with the ID 6 has been terminated.
Jan 18 11:32:09 managed-node3 systemd[1]: Removed slice user-1111.slice - User Slice of UID 1111.
░░ Subject: A stop job for unit user-1111.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-1111.slice has finished.
░░
░░ The job identifier is 1645 and the job result is done.
Jan 18 11:32:09 managed-node3 systemd[1]: user-1111.slice: Consumed 10.570s CPU time, 489.9M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-1111.slice completed and consumed the indicated resources.
Jan 18 11:32:09 managed-node3 systemd[4342]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
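The loginctl disable-linger task above carries removes=/var/lib/systemd/linger/user_quadlet_basic, i.e. it only runs while the linger flag file still exists; dropping linger is what lets systemd stop user@1111.service and reap the user slice, exactly as the journal then records. A small sketch of the same guard, with the path and user name taken from the log; the helper itself is illustrative.

import os
import subprocess

def disable_linger(user: str) -> bool:
    """Disable lingering only if the linger flag file still exists,
    mirroring the Ansible task's removes= guard seen above."""
    flag = f"/var/lib/systemd/linger/{user}"
    if not os.path.exists(flag):
        return False  # already disabled; nothing to do
    subprocess.run(["loginctl", "disable-linger", user], check=True)
    return True

if disable_linger("user_quadlet_basic"):
    print("linger disabled; systemd will tear down the user manager")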
Jan 18 11:32:09 managed-node3 python3.12[55648]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:10 managed-node3 python3.12[55780]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Jan 18 11:32:10 managed-node3 systemd[1]: Stopping systemd-logind.service - User Login Management...
░░ Subject: A stop job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit systemd-logind.service has begun execution.
░░
░░ The job identifier is 1647.
Jan 18 11:32:10 managed-node3 systemd[1]: systemd-logind.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-logind.service has successfully entered the 'dead' state.
Jan 18 11:32:10 managed-node3 systemd[1]: Stopped systemd-logind.service - User Login Management.
░░ Subject: A stop job for unit systemd-logind.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit systemd-logind.service has finished.
░░
░░ The job identifier is 1647 and the job result is done.
Jan 18 11:32:10 managed-node3 python3.12[55926]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:10 managed-node3 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm...
░░ Subject: A start job for unit modprobe@drm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit modprobe@drm.service has begun execution.
░░
░░ The job identifier is 1727.
Jan 18 11:32:10 managed-node3 systemd[1]: modprobe@drm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit modprobe@drm.service has successfully entered the 'dead' state.
Jan 18 11:32:10 managed-node3 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm.
░░ Subject: A start job for unit modprobe@drm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit modprobe@drm.service has finished successfully.
░░
░░ The job identifier is 1727.
Jan 18 11:32:10 managed-node3 systemd[1]: Starting systemd-logind.service - User Login Management...
░░ Subject: A start job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-logind.service has begun execution.
░░
░░ The job identifier is 1648.
Jan 18 11:32:10 managed-node3 systemd-logind[55930]: New seat seat0.
░░ Subject: A new seat seat0 is now available
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new seat seat0 has been configured and is now available.
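Above, the test reads loginctl show-user --value -p State for the test user and restarts systemd-logind to flush stale session state before re-checking. A sketch of such a wait loop; treating a non-zero exit as "logind no longer tracks the user", plus the retry count and delay, are assumptions of this sketch, not the role's exact logic.

import subprocess
import time

def wait_user_gone(user: str, attempts: int = 30, delay: float = 1.0) -> bool:
    """Poll logind until it stops tracking `user`. While tracked, the command
    prints a State such as 'active' or 'closing'; once the user is gone it
    exits non-zero."""
    for _ in range(attempts):
        res = subprocess.run(
            ["loginctl", "show-user", "--value", "-p", "State", user],
            capture_output=True, text=True,
        )
        if res.returncode != 0:
            return True
        time.sleep(delay)
    return False

print(wait_user_gone("user_quadlet_basic"))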
Jan 18 11:32:10 managed-node3 systemd-logind[55930]: Watching system buttons on /dev/input/event0 (Power Button)
Jan 18 11:32:10 managed-node3 systemd-logind[55930]: Watching system buttons on /dev/input/event1 (Sleep Button)
Jan 18 11:32:10 managed-node3 systemd-logind[55930]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard)
Jan 18 11:32:10 managed-node3 systemd[1]: Started systemd-logind.service - User Login Management.
░░ Subject: A start job for unit systemd-logind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-logind.service has finished successfully.
░░
░░ The job identifier is 1648.
Jan 18 11:32:11 managed-node3 python3.12[56067]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:32:13 managed-node3 python3.12[56329]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 18 11:32:14 managed-node3 python3.12[56466]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jan 18 11:32:14 managed-node3 python3.12[56598]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:32:17 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:32:17 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:32:18 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:32:19 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:32:19 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jan 18 11:32:20 managed-node3 python3.12[57048]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jan 18 11:32:21 managed-node3 systemd[1]: Stopping session-3.scope - Session 3 of User root...
░░ Subject: A stop job for unit session-3.scope has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-3.scope has begun execution.
░░
░░ The job identifier is 1811.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopping session-5.scope - Session 5 of User root...
░░ Subject: A stop job for unit session-5.scope has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-5.scope has begun execution.
░░
░░ The job identifier is 1812.
Jan 18 11:32:21 managed-node3 sshd-session[4389]: error: mm_reap: preauth child terminated by signal 15
Jan 18 11:32:21 managed-node3 sshd-session[6518]: error: mm_reap: preauth child terminated by signal 15
Jan 18 11:32:21 managed-node3 sshd-session[6518]: pam_systemd(sshd:session): Failed to release session: No session '5' known
Jan 18 11:32:21 managed-node3 sshd-session[6518]: pam_unix(sshd:session): session closed for user root
Jan 18 11:32:21 managed-node3 sshd-session[4389]: pam_systemd(sshd:session): Failed to release session: No session '3' known
Jan 18 11:32:21 managed-node3 systemd[1]: session-5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-5.scope has successfully entered the 'dead' state.
Jan 18 11:32:21 managed-node3 sshd-session[4389]: pam_unix(sshd:session): session closed for user root
Jan 18 11:32:21 managed-node3 systemd[1]: Stopped session-5.scope - Session 5 of User root.
░░ Subject: A stop job for unit session-5.scope has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-5.scope has finished.
░░
░░ The job identifier is 1812 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[1]: session-5.scope: Consumed 2min 19.811s CPU time, 396.9M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-5.scope completed and consumed the indicated resources.
Jan 18 11:32:21 managed-node3 systemd[1]: session-3.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-3.scope has successfully entered the 'dead' state.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopped session-3.scope - Session 3 of User root.
░░ Subject: A stop job for unit session-3.scope has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-3.scope has finished.
░░
░░ The job identifier is 1811 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[1]: session-3.scope: Consumed 3.169s CPU time, 86.3M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-3.scope completed and consumed the indicated resources.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopping user@0.service - User Manager for UID 0...
░░ Subject: A stop job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has begun execution.
░░
░░ The job identifier is 1810.
Jan 18 11:32:21 managed-node3 systemd[4342]: Activating special unit exit.target...
Jan 18 11:32:21 managed-node3 systemd[4342]: Removed slice background.slice - User Background Tasks Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 34 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped target default.target - Main User Target.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 32 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped target basic.target - Basic System.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 31 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped target paths.target - Paths.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 29 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped target sockets.target - Sockets.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 30 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped target timers.target - Timers.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 25 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 24 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Closed dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 26 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 23 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Removed slice app.slice - User Application Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 28 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[4342]: Reached target shutdown.target - Shutdown.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 19.
Jan 18 11:32:21 managed-node3 systemd[4342]: Finished systemd-exit.service - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 18.
Jan 18 11:32:21 managed-node3 systemd[4342]: Reached target exit.target - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 17.
Jan 18 11:32:21 managed-node3 systemd[1]: user@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@0.service has successfully entered the 'dead' state.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopped user@0.service - User Manager for UID 0.
░░ Subject: A stop job for unit user@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has finished.
░░
░░ The job identifier is 1810 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 1809.
Jan 18 11:32:21 managed-node3 systemd[1]: run-user-0.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-user-0.mount has successfully entered the 'dead' state.
Jan 18 11:32:21 managed-node3 systemd[1]: user-runtime-dir@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state.
Jan 18 11:32:21 managed-node3 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A stop job for unit user-runtime-dir@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has finished.
░░
░░ The job identifier is 1809 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[1]: Removed slice user-0.slice - User Slice of UID 0.
░░ Subject: A stop job for unit user-0.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-0.slice has finished.
░░
░░ The job identifier is 1813 and the job result is done.
Jan 18 11:32:21 managed-node3 systemd[1]: user-0.slice: Consumed 2min 23.358s CPU time, 462.5M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-0.slice completed and consumed the indicated resources.
Jan 18 11:32:21 managed-node3 sshd-session[57075]: Accepted publickey for root from 10.31.43.51 port 35430 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Jan 18 11:32:21 managed-node3 systemd[1]: Created slice user-0.slice - User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-0.slice has finished successfully.
░░
░░ The job identifier is 1816.
Jan 18 11:32:21 managed-node3 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1815. Jan 18 11:32:21 managed-node3 systemd-logind[55930]: New session 7 of user root. ░░ Subject: A new session 7 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user root. ░░ ░░ The leading process of the session is 57075. Jan 18 11:32:21 managed-node3 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 1815. Jan 18 11:32:21 managed-node3 systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1895. Jan 18 11:32:21 managed-node3 systemd-logind[55930]: New session 8 of user root. ░░ Subject: A new session 8 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 8 has been created for the user root. ░░ ░░ The leading process of the session is 57080. Jan 18 11:32:21 managed-node3 (systemd)[57080]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:32:21 managed-node3 systemd[57080]: Queued start job for default target default.target. Jan 18 11:32:21 managed-node3 systemd[57080]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 18 11:32:21 managed-node3 systemd[57080]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 18 11:32:21 managed-node3 systemd[57080]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 18 11:32:21 managed-node3 systemd[57080]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 18 11:32:21 managed-node3 systemd[57080]: Reached target timers.target - Timers. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 18 11:32:21 managed-node3 systemd[57080]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 18 11:32:21 managed-node3 systemd[57080]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 5. Jan 18 11:32:21 managed-node3 systemd[57080]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 18 11:32:21 managed-node3 systemd[57080]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 18 11:32:21 managed-node3 systemd[57080]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 18 11:32:21 managed-node3 systemd[57080]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 18 11:32:21 managed-node3 systemd[57080]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 18 11:32:21 managed-node3 systemd[57080]: Startup finished in 104ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 104805 microseconds. Jan 18 11:32:21 managed-node3 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 1895. Jan 18 11:32:21 managed-node3 systemd[1]: Started session-7.scope - Session 7 of User root. 
░░ Subject: A start job for unit session-7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-7.scope has finished successfully. ░░ ░░ The job identifier is 1976. Jan 18 11:32:21 managed-node3 sshd-session[57075]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 18 11:32:22 managed-node3 python3.12[57207]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:23 managed-node3 python3.12[57338]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:32:23 managed-node3 python3.12[57443]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-basic.network owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1737217943.0027647-16717-122948793557998/.source.network _original_basename=._fc91ogv follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:24 managed-node3 python3.12[57574]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:24 managed-node3 systemd[1]: Reload requested from client PID 57575 ('systemctl') (unit session-7.scope)... Jan 18 11:32:24 managed-node3 systemd[1]: Reloading... Jan 18 11:32:24 managed-node3 systemd-rc-local-generator[57617]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:24 managed-node3 systemd-ssh-generator[57619]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:24 managed-node3 (sd-exec-[57592]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:24 managed-node3 systemd[1]: Reloading finished in 200 ms. Jan 18 11:32:24 managed-node3 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2058. Jan 18 11:32:24 managed-node3 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 18 11:32:24 managed-node3 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2058. 
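The role has just written /etc/containers/systemd/quadlet-basic.network (the copy task above) and triggered a daemon-reload so the quadlet generator can emit quadlet-basic-network.service. The log identifies the file only by checksum; a minimal sketch of a quadlet .network unit consistent with what follows (the service prints the custom network name "quadlet-basic-name" below, so the unit evidently sets NetworkName=) is:

    # Sketch only -- the captured test file is not reproduced in this log.
    # NetworkName= is assumed from the journal output that follows.
    cat > /etc/containers/systemd/quadlet-basic.network <<'EOF'
    [Network]
    NetworkName=quadlet-basic-name
    EOF
    # Equivalent of the two ansible-systemd calls around the copy:
    systemctl daemon-reload
    systemctl start quadlet-basic-network.service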
Jan 18 11:32:24 managed-node3 python3.12[57761]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:32:24 managed-node3 systemd[1]: Starting quadlet-basic-network.service... ░░ Subject: A start job for unit quadlet-basic-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-network.service has begun execution. ░░ ░░ The job identifier is 2136. Jan 18 11:32:25 managed-node3 quadlet-basic-network[57765]: quadlet-basic-name Jan 18 11:32:25 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:25 managed-node3 systemd[1]: Finished quadlet-basic-network.service. ░░ Subject: A start job for unit quadlet-basic-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-network.service has finished successfully. ░░ ░░ The job identifier is 2136. Jan 18 11:32:25 managed-node3 python3.12[57903]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:27 managed-node3 python3.12[58036]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:27 managed-node3 python3.12[58167]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:32:28 managed-node3 python3.12[58272]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217947.4991572-16832-22648816520572/.source.network dest=/etc/containers/systemd/quadlet-basic-unused-network.network owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:28 managed-node3 python3.12[58403]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:28 managed-node3 systemd[1]: Reload requested from client PID 58404 ('systemctl') (unit session-7.scope)... Jan 18 11:32:28 managed-node3 systemd[1]: Reloading... Jan 18 11:32:28 managed-node3 systemd-rc-local-generator[58445]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:28 managed-node3 systemd-ssh-generator[58448]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:28 managed-node3 (sd-exec-[58421]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. 
Jan 18 11:32:28 managed-node3 systemd[1]: Reloading finished in 197 ms. Jan 18 11:32:29 managed-node3 python3.12[58586]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:32:29 managed-node3 systemd[1]: Starting quadlet-basic-unused-network-network.service... ░░ Subject: A start job for unit quadlet-basic-unused-network-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-unused-network-network.service has begun execution. ░░ ░░ The job identifier is 2220. Jan 18 11:32:29 managed-node3 quadlet-basic-unused-network-network[58590]: systemd-quadlet-basic-unused-network Jan 18 11:32:29 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:29 managed-node3 systemd[1]: Finished quadlet-basic-unused-network-network.service. ░░ Subject: A start job for unit quadlet-basic-unused-network-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-unused-network-network.service has finished successfully. ░░ ░░ The job identifier is 2220. Jan 18 11:32:30 managed-node3 python3.12[58728]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:31 managed-node3 python3.12[58861]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:32 managed-node3 python3.12[58992]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:32:32 managed-node3 python3.12[59097]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217952.0192504-16961-155479983655334/.source.volume dest=/etc/containers/systemd/quadlet-basic-mysql.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:33 managed-node3 python3.12[59228]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:33 managed-node3 systemd[1]: Reload requested from client PID 59229 ('systemctl') (unit session-7.scope)... Jan 18 11:32:33 managed-node3 systemd[1]: Reloading... Jan 18 11:32:33 managed-node3 systemd-rc-local-generator[59272]: /etc/rc.d/rc.local is not marked executable, skipping. 
Jan 18 11:32:33 managed-node3 systemd-ssh-generator[59276]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:33 managed-node3 (sd-exec-[59246]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:33 managed-node3 systemd[1]: Reloading finished in 198 ms. Jan 18 11:32:33 managed-node3 python3.12[59411]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:32:33 managed-node3 systemd[1]: Starting quadlet-basic-mysql-volume.service... ░░ Subject: A start job for unit quadlet-basic-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2304. Jan 18 11:32:34 managed-node3 podman[59415]: 2025-01-18 11:32:34.016803672 -0500 EST m=+0.030009262 volume create quadlet-basic-mysql-name Jan 18 11:32:34 managed-node3 quadlet-basic-mysql-volume[59415]: quadlet-basic-mysql-name Jan 18 11:32:34 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:34 managed-node3 systemd[1]: Finished quadlet-basic-mysql-volume.service. ░░ Subject: A start job for unit quadlet-basic-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2304. 
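The same write / daemon-reload / start cycle just ran for the volume spec: quadlet-basic-mysql-volume.service created a volume named quadlet-basic-mysql-name, so the .volume unit must carry an explicit VolumeName=. A sketch under that assumption:

    # Sketch; the real quadlet-basic-mysql.volume appears above only as a checksum.
    cat > /etc/containers/systemd/quadlet-basic-mysql.volume <<'EOF'
    [Volume]
    VolumeName=quadlet-basic-mysql-name
    EOF
    systemctl daemon-reload
    systemctl start quadlet-basic-mysql-volume.service
    podman volume ls --format '{{.Name}}'   # should now include quadlet-basic-mysql-name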
Jan 18 11:32:34 managed-node3 python3.12[59554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:36 managed-node3 python3.12[59687]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:37 managed-node3 python3.12[59818]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:32:37 managed-node3 python3.12[59923]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217957.1720717-17150-132972819108597/.source.volume dest=/etc/containers/systemd/quadlet-basic-unused-volume.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:38 managed-node3 python3.12[60054]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:38 managed-node3 systemd[1]: Reload requested from client PID 60055 ('systemctl') (unit session-7.scope)... Jan 18 11:32:38 managed-node3 systemd[1]: Reloading... Jan 18 11:32:38 managed-node3 systemd-rc-local-generator[60097]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:38 managed-node3 systemd-ssh-generator[60099]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:38 managed-node3 (sd-exec-[60072]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:38 managed-node3 systemd[1]: Reloading finished in 192 ms. Jan 18 11:32:39 managed-node3 python3.12[60237]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:32:39 managed-node3 systemd[1]: Starting quadlet-basic-unused-volume-volume.service... ░░ Subject: A start job for unit quadlet-basic-unused-volume-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-unused-volume-volume.service has begun execution. ░░ ░░ The job identifier is 2388. Jan 18 11:32:39 managed-node3 podman[60241]: 2025-01-18 11:32:39.286991102 -0500 EST m=+0.029030643 volume create systemd-quadlet-basic-unused-volume Jan 18 11:32:39 managed-node3 quadlet-basic-unused-volume-volume[60241]: systemd-quadlet-basic-unused-volume Jan 18 11:32:39 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 18 11:32:39 managed-node3 systemd[1]: Finished quadlet-basic-unused-volume-volume.service. ░░ Subject: A start job for unit quadlet-basic-unused-volume-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-unused-volume-volume.service has finished successfully. ░░ ░░ The job identifier is 2388. Jan 18 11:32:40 managed-node3 python3.12[60380]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 18 11:32:41 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:44 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat4009507287-lower\x2dmapped.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat4009507287-lower\x2dmapped.mount has successfully entered the 'dead' state. Jan 18 11:32:48 managed-node3 podman[60521]: 2025-01-18 11:32:48.580206679 -0500 EST m=+6.738848240 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 18 11:32:48 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:48 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
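The two volume units illustrate quadlet's naming rule: with VolumeName= set, the object is created under that name (quadlet-basic-mysql-name), while quadlet-basic-unused-volume.volume fell back to the generated default of systemd- plus the unit basename (systemd-quadlet-basic-unused-volume); the two networks earlier followed the same pattern. To see which names a run actually produced (root scope, as in this test):

    podman network ls --format '{{.Name}}'
    podman volume ls --format '{{.Name}}'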
Jan 18 11:32:49 managed-node3 python3.12[60831]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:49 managed-node3 python3.12[60962]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 18 11:32:49 managed-node3 python3.12[61067]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1737217969.326097-17547-170001703458337/.source.container dest=/etc/containers/systemd/quadlet-basic-mysql.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 18 11:32:50 managed-node3 python3.12[61198]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 18 11:32:50 managed-node3 systemd[1]: Reload requested from client PID 61199 ('systemctl') (unit session-7.scope)... Jan 18 11:32:50 managed-node3 systemd[1]: Reloading... Jan 18 11:32:50 managed-node3 systemd-ssh-generator[61244]: Failed to query local AF_VSOCK CID: Permission denied Jan 18 11:32:50 managed-node3 systemd-rc-local-generator[61241]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 18 11:32:50 managed-node3 (sd-exec-[61216]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 18 11:32:50 managed-node3 systemd[1]: Reloading finished in 196 ms. Jan 18 11:32:51 managed-node3 python3.12[61381]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 18 11:32:51 managed-node3 systemd[1]: Starting quadlet-basic-mysql.service... ░░ Subject: A start job for unit quadlet-basic-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-mysql.service has begun execution. ░░ ░░ The job identifier is 2472. 
Jan 18 11:32:51 managed-node3 podman[61385]: 2025-01-18 11:32:51.363716909 -0500 EST m=+0.048670833 container create 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-basic-mysql-name, PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service) Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:32:51 managed-node3 kernel: veth0: entered allmulticast mode Jan 18 11:32:51 managed-node3 kernel: veth0: entered promiscuous mode Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered blocking state Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered forwarding state Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.3945] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.3956] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.3968] device (veth0): carrier: link connected Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.3970] device (podman1): carrier: link connected Jan 18 11:32:51 managed-node3 (udev-worker)[61396]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:32:51 managed-node3 (udev-worker)[61398]: Network interface NamePolicy= disabled on kernel command line. Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4193] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4199] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4205] device (podman1): Activation: starting connection 'podman1' (013eb06d-3785-4327-8306-7db7b8655ea3) Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4206] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4208] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4210] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4212] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 podman[61385]: 2025-01-18 11:32:51.343650483 -0500 EST m=+0.028604240 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 18 11:32:51 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2558. Jan 18 11:32:51 managed-node3 systemd[1]: Started run-p61408-i61708.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. 
░░ Subject: A start job for unit run-p61408-i61708.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p61408-i61708.scope has finished successfully. ░░ ░░ The job identifier is 2637. Jan 18 11:32:51 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2558. Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4671] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4677] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.4685] device (podman1): Activation: successful, device activated. Jan 18 11:32:51 managed-node3 podman[61385]: 2025-01-18 11:32:51.529374793 -0500 EST m=+0.214328571 container init 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-basic-mysql-name, PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service) Jan 18 11:32:51 managed-node3 podman[61385]: 2025-01-18 11:32:51.53334822 -0500 EST m=+0.218302080 container start 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-basic-mysql-name, PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service) Jan 18 11:32:51 managed-node3 systemd[1]: Started quadlet-basic-mysql.service. ░░ Subject: A start job for unit quadlet-basic-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-basic-mysql.service has finished successfully. ░░ ░░ The job identifier is 2472. Jan 18 11:32:51 managed-node3 quadlet-basic-mysql[61385]: 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d Jan 18 11:32:51 managed-node3 podman[61445]: 2025-01-18 11:32:51.588064512 -0500 EST m=+0.033330308 container died 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-basic-mysql-name, PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service) Jan 18 11:32:51 managed-node3 systemd[1]: run-p61408-i61708.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p61408-i61708.scope has successfully entered the 'dead' state. Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:32:51 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Jan 18 11:32:51 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Jan 18 11:32:51 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Jan 18 11:32:51 managed-node3 NetworkManager[712]: [1737217971.6289] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 18 11:32:51 managed-node3 systemd[1]: run-netns-netns\x2dd84fb293\x2d29e9\x2d400c\x2d5197\x2d3fbaf1ddca67.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd84fb293\x2d29e9\x2d400c\x2d5197\x2d3fbaf1ddca67.mount has successfully entered the 'dead' state. Jan 18 11:32:51 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d-userdata-shm.mount has successfully entered the 'dead' state. Jan 18 11:32:51 managed-node3 podman[61445]: 2025-01-18 11:32:51.690924138 -0500 EST m=+0.136189660 container remove 45b253ddce8efbee41c5ee3df1480d24113ec7ed8c8ca272bd1fad1a7d78289d (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-basic-mysql-name, PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service) Jan 18 11:32:51 managed-node3 systemd[1]: quadlet-basic-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-basic-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 18 11:32:51 managed-node3 systemd[1]: quadlet-basic-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-basic-mysql.service has entered the 'failed' state with result 'exit-code'. Jan 18 11:32:52 managed-node3 python3.12[61605]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:52 managed-node3 systemd[1]: var-lib-containers-storage-overlay-59f2bc3c8854c21d92f80466ceeb49e49e40f669801db812096425e9446f39b0-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-59f2bc3c8854c21d92f80466ceeb49e49e40f669801db812096425e9446f39b0-merged.mount has successfully entered the 'dead' state. Jan 18 11:32:52 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
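quadlet-basic-mysql.service started the container, but its main process exited with status 1: the container died within a fraction of a second, was removed, and the unit entered the failed state, after which the test dumps the .container file. Its contents appear above only as a checksum; a quadlet .container wired to the other units from this run would look roughly like this (a sketch: Image=, ContainerName= and the referenced .network/.volume names come from the journal, the mount path is an assumption):

    # Sketch only -- not the captured test file.
    cat > /etc/containers/systemd/quadlet-basic-mysql.container <<'EOF'
    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-basic-mysql-name
    Network=quadlet-basic.network
    Volume=quadlet-basic-mysql.volume:/var/lib/mysql
    # Any Environment=/Secret= lines in the real file are not recoverable
    # from this log.
    EOF
    systemctl daemon-reload
    systemctl start quadlet-basic-mysql.service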
Jan 18 11:32:52 managed-node3 python3.12[61737]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic.network _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:52 managed-node3 python3.12[61869]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.volume _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:53 managed-node3 python3.12[62001]: ansible-ansible.legacy.command Invoked with _raw_params=podman exec quadlet-basic-mysql-name cat /tmp/test.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:53 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:53 managed-node3 python3.12[62140]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:53 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:53 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 18 11:32:54 managed-node3 python3.12[62317]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 18 11:32:54 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
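With the service failed, the test collects diagnostics: it lists volumes, networks, secrets, containers, pods and images, scans the audit log for AVC denials, and next captures journalctl -ex. For interactive debugging of a failed quadlet unit, the equivalent manual steps would be (unit name taken from this run; -dryrun is the documented way to have the generator re-parse the quadlet files and report errors):

    systemctl status quadlet-basic-mysql.service
    journalctl -u quadlet-basic-mysql.service -e
    /usr/libexec/podman/quadlet -dryrun   # parse /etc/containers/systemd and print the generated units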
Jan 18 11:32:54 managed-node3 python3.12[62449]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Cleanup user] ************************************************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:299 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.523) 0:03:36.272 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.204) 0:03:36.477 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.068) 0:03:36.545 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.046) 0:03:36.591 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.034) 0:03:36.626 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.035) 0:03:36.662 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.032) 0:03:36.695 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.036) 0:03:36.731 ****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ 
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:32:55 -0500 (0:00:00.080) 0:03:36.811 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.885) 0:03:37.697 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.034) 0:03:37.731 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.039) 0:03:37.771 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.035) 0:03:37.806 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.034) 0:03:37.841 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:32:56 -0500 (0:00:00.102) 0:03:37.943 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026005", "end": "2025-01-18 11:32:57.037271", "rc": 0, "start": "2025-01-18 11:32:57.011266" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.413) 0:03:38.356 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.039) 0:03:38.396 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.034) 0:03:38.431 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.092) 0:03:38.523 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.068) 0:03:38.592 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.065) 0:03:38.657 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.063) 0:03:38.721 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.065) 0:03:38.787 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": [ "x", "1111", "1111", "", "/home/user_quadlet_basic", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:32:57 -0500 (0:00:00.434) 0:03:39.221 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.067) 0:03:39.289 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.078) 0:03:39.367 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:32:58 -0500 (0:00:00.448) 0:03:39.817 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003935", "end": "2025-01-18 11:32:58.896642", "rc": 0, "start": "2025-01-18 11:32:58.892707" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 
Saturday 18 January 2025 11:32:58 -0500 (0:00:00.415) 0:03:40.232 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005419", "end": "2025-01-18 11:32:59.345752", "rc": 0, "start": "2025-01-18 11:32:59.340333" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.461) 0:03:40.693 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.083) 0:03:40.776 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.150) 0:03:40.927 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.059) 0:03:40.987 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.059) 0:03:41.047 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.059) 0:03:41.107 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.056) 0:03:41.163 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": 
"/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:32:59 -0500 (0:00:00.073) 0:03:41.237 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.098) 0:03:41.336 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.041) 0:03:41.377 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.041) 0:03:41.418 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.071) 0:03:41.490 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.035) 0:03:41.525 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.034) 0:03:41.559 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.067) 0:03:41.627 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } 
TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.059) 0:03:41.686 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.051) 0:03:41.738 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.184) 0:03:41.922 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.040) 0:03:41.963 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.042) 0:03:42.005 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.039) 0:03:42.044 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.035) 0:03:42.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.032) 0:03:42.113 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.034) 0:03:42.147 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.036) 0:03:42.184 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:33:00 -0500 (0:00:00.035) 0:03:42.220 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.045) 0:03:42.265 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.189) 0:03:42.454 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.062) 0:03:42.517 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.083) 0:03:42.601 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.047) 0:03:42.648 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.049) 0:03:42.698 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.140) 0:03:42.838 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.035) 0:03:42.874 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.035) 0:03:42.910 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.041) 0:03:42.951 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.057) 0:03:43.009 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.044) 0:03:43.054 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.044) 0:03:43.098 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.044) 0:03:43.142 ****** 
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.043) 0:03:43.186 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:33:01 -0500 (0:00:00.037) 0:03:43.223 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.045) 0:03:43.269 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.063) 0:03:43.332 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.036) 0:03:43.369 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.037) 0:03:43.407 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.042) 0:03:43.449 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.416) 0:03:43.866 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.053) 0:03:43.920 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.043) 0:03:43.963 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.146) 0:03:44.110 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.039) 0:03:44.149 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.040) 0:03:44.190 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:02 -0500 (0:00:00.047) 0:03:44.237 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.035) 0:03:44.273 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.039) 0:03:44.313 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:03 -0500 
(0:00:00.045) 0:03:44.358 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.040) 0:03:44.398 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.039) 0:03:44.438 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.042) 0:03:44.480 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.039) 0:03:44.519 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.036) 0:03:44.556 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.037) 0:03:44.593 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.044) 0:03:44.637 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.064) 0:03:44.701 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.042) 0:03:44.744 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.044) 0:03:44.789 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:33:03 -0500 (0:00:00.141) 0:03:44.930 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.394) 0:03:45.325 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.043) 0:03:45.369 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.036) 0:03:45.405 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.138) 0:03:45.544 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", 
"__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.046) 0:03:45.590 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.048) 0:03:45.639 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.052) 0:03:45.692 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.083) 0:03:45.776 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.133) 0:03:45.909 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.079) 0:03:45.988 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:04 
-0500 (0:00:00.053) 0:03:46.042 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:04 -0500 (0:00:00.054) 0:03:46.096 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:05 -0500 (0:00:00.410) 0:03:46.507 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004473", "end": "2025-01-18 11:33:05.589338", "rc": 0, "start": "2025-01-18 11:33:05.584865" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:05 -0500 (0:00:00.408) 0:03:46.915 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005070", "end": "2025-01-18 11:33:06.000391", "rc": 0, "start": "2025-01-18 11:33:05.995321" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:06 -0500 (0:00:00.493) 0:03:47.409 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:06 -0500 (0:00:00.062) 0:03:47.471 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:06 -0500 (0:00:00.036) 0:03:47.508 
******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.037) 0:03:47.545 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.036) 0:03:47.582 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.037) 0:03:47.619 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.037) 0:03:47.657 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.061) 0:03:47.719 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.062) 0:03:47.781 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.058) 0:03:47.840 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false }
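Part 0 above carries the full Container spec for quadlet-basic-mysql, and parts 3 through 5 derive from it the user-scoped service name (quadlet-basic-mysql.service) and the rootless unit path (/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container). A sketch of how such a spec is fed to the role, assuming the podman_quadlet_specs list format from the role documentation and reusing only values visible in this log (run_as_user is an assumed key for selecting the rootless user):

    podman_quadlet_specs:
      - name: quadlet-basic-mysql
        type: container
        run_as_user: user_quadlet_basic  # assumed key, shown for illustration
        Install:
          WantedBy: default.target
        Container:
          ContainerName: quadlet-basic-mysql-name
          Environment:
            - FOO=/bin/busybox-extras
            - BAZ=test
          Image: quay.io/linux-system-roles/mysql:5.6
          Network: quadlet-basic.network
          PodmanArgs: >-
            --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD
            --secret=json_secret,type=mount,target=/tmp/test.json
          Volume: quadlet-basic-mysql.volume:/var/lib/mysql

Because __podman_state is "absent" in this pass, the role is tearing the unit down rather than rendering it.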
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.100) 0:03:47.940 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.064) 0:03:48.005 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 18 January 2025 11:33:06 -0500 (0:00:00.091) 0:03:48.096 ******
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 18 January 2025 11:33:07 -0500 (0:00:00.393) 0:03:48.490 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 18 January 2025 11:33:07 -0500 (0:00:00.039) 0:03:48.529 ******
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 18 January 2025 11:33:07 -0500 (0:00:00.390) 0:03:48.920 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 18 January 2025 11:33:07 -0500 (0:00:00.032) 0:03:48.953 ******
ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "state": "absent" }

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.471) 0:03:49.425 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" }
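The cleanup sequence above stops the unit only when the user's runtime directory is still present (skipped here, since XDG_RUNTIME_DIR no longer exists), removes the quadlet file, and reloads systemd only if the removal actually changed something; in this pass the file was already gone, so the remove reports ok and the reload is skipped. A rough standalone equivalent using stock modules (the path comes from the log; the become and XDG environment plumbing the role performs for rootless users is omitted, and the registered variable name is illustrative):

    - name: Remove the rendered quadlet unit file
      ansible.builtin.file:
        path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container
        state: absent
      register: unit_file_removed  # illustrative name

    - name: Reload the user's systemd manager only if a file was removed
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      when: unit_file_removed is changed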
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.035) 0:03:49.461 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.498 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.046) 0:03:49.545 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.582 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.039) 0:03:49.621 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.058) 0:03:49.680 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.717 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.755 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:33:08 -0500 (0:00:00.042) 0:03:49.797 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason":
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.834 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.872 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.910 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.948 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:49.986 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.037) 0:03:50.023 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.041) 0:03:50.064 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.048) 0:03:50.113 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:08 -0500 (0:00:00.117) 0:03:50.230 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.044) 0:03:50.274 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.036) 0:03:50.310 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.052) 0:03:50.363 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.064) 0:03:50.427 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.039) 0:03:50.467 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.043) 0:03:50.510 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.050) 0:03:50.561 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, 
"charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:09 -0500 (0:00:00.418) 0:03:50.979 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003897", "end": "2025-01-18 11:33:10.101482", "rc": 0, "start": "2025-01-18 11:33:10.097585" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.446) 0:03:51.426 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005153", "end": "2025-01-18 11:33:10.504869", "rc": 0, "start": "2025-01-18 11:33:10.499716" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.417) 0:03:51.843 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.083) 0:03:51.927 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.060) 0:03:51.987 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.057) 0:03:52.044 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.060) 0:03:52.105 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:10 -0500 (0:00:00.058) 0:03:52.163 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.086) 0:03:52.250 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.094) 0:03:52.344 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.159) 0:03:52.504 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.057) 0:03:52.562 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.129) 0:03:52.692 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.071) 0:03:52.764 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:11 -0500 (0:00:00.123) 0:03:52.887 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.436) 0:03:53.323 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.064) 0:03:53.387 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.416) 0:03:53.804 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:12 -0500 (0:00:00.055) 0:03:53.860 ****** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.441) 0:03:54.301 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.041) 0:03:54.343 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.051) 0:03:54.394 ****** skipping: [managed-node3] => { "censored": "the 
output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.066) 0:03:54.460 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.042) 0:03:54.502 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.044) 0:03:54.546 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.071) 0:03:54.618 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.122) 0:03:54.741 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.037) 0:03:54.778 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.043) 0:03:54.822 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.038) 0:03:54.861 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and 
debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.036) 0:03:54.897 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.039) 0:03:54.937 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.037) 0:03:54.974 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.037) 0:03:55.011 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.038) 0:03:55.050 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.037) 0:03:55.087 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:33:13 -0500 (0:00:00.034) 0:03:55.122 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }
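The spec for this item is { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, i.e. a quadlet .volume unit. Assuming the spec's sections and keys map one-to-one onto the generated INI file (a sketch of the end result, not the role's actual generation logic), maintaining the same unit by hand would look roughly like:

    - name: Install a named-volume quadlet for the rootless user (sketch)
      ansible.builtin.copy:
        dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume
        content: |
          [Volume]
          VolumeName=quadlet-basic-mysql-name
        owner: user_quadlet_basic
        group: "1111"        # __podman_group resolved by the role below
        mode: "0644"

From such a file the quadlet generator derives quadlet-basic-mysql-volume.service, which matches the __podman_service_name the role computes for this item further down. Since __podman_state is "absent" in this run, the role heads into the cleanup path instead of installing the file.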
"user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:13 -0500 (0:00:00.064) 0:03:55.237 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.061) 0:03:55.299 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.097) 0:03:55.397 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.109) 0:03:55.506 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.046) 0:03:55.553 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.048) 0:03:55.601 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.211) 0:03:55.813 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": 
"/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:14 -0500 (0:00:00.422) 0:03:56.235 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004379", "end": "2025-01-18 11:33:15.318427", "rc": 0, "start": "2025-01-18 11:33:15.314048" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.407) 0:03:56.643 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005346", "end": "2025-01-18 11:33:15.729287", "rc": 0, "start": "2025-01-18 11:33:15.723941" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.415) 0:03:57.059 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.061) 0:03:57.121 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.043) 0:03:57.164 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.041) 0:03:57.206 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:15 -0500 (0:00:00.036) 0:03:57.242 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.037) 0:03:57.280 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.035) 0:03:57.315 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.060) 0:03:57.376 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.060) 0:03:57.436 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.062) 0:03:57.498 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.096) 0:03:57.595 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.048) 0:03:57.644 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:16 -0500 (0:00:00.083) 0:03:57.728 
******
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 18 January 2025 11:33:16 -0500 (0:00:00.391) 0:03:58.119 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 18 January 2025 11:33:16 -0500 (0:00:00.115) 0:03:58.234 ******
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.391) 0:03:58.625 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.035) 0:03:58.661 ******
ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" }

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.402) 0:03:59.064 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.036) 0:03:59.100 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.037) 0:03:59.137 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.047) 0:03:59.184 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }
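"Refresh systemd" was skipped just above because the quadlet file was already gone, so __podman_file_removed reported no change. When a rootless quadlet file really is removed, the user's systemd instance has to reload so the quadlet generator drops the derived unit; a sketch of that refresh (a hypothetical task, reusing the UID 1111 runtime directory seen in this run):

    - name: Reload the user systemd instance after quadlet changes (sketch)
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111   # where the user's session bus lives

Note that in this log the Stat XDG_RUNTIME_DIR check found /run/user/1111 missing (the user has no running session), which is exactly why the rootless service operations are being skipped here.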
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:33:17 -0500 (0:00:00.037) 0:03:59.222 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.038) 0:03:59.260 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.058) 0:03:59.319 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.037) 0:03:59.356 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.037) 0:03:59.393 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.042) 0:03:59.435 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.037) 0:03:59.472 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:33:18 -0500 (0:00:00.056) 0:03:59.529 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path:
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.038) 0:03:59.567 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.038) 0:03:59.605 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.037) 0:03:59.642 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.111) 0:03:59.754 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.039) 0:03:59.793 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.033) 0:03:59.827 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.045) 0:03:59.872 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.043) 0:03:59.916 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.035) 0:03:59.952 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.052) 0:04:00.004 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.063) 0:04:00.068 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.039) 0:04:00.108 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.042) 0:04:00.151 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:18 -0500 (0:00:00.049) 0:04:00.201 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.407) 
0:04:00.608 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004508", "end": "2025-01-18 11:33:19.688703", "rc": 0, "start": "2025-01-18 11:33:19.684195" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:19 -0500 (0:00:00.423) 0:04:01.032 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005053", "end": "2025-01-18 11:33:20.140435", "rc": 0, "start": "2025-01-18 11:33:20.135382" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.433) 0:04:01.465 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.057) 0:04:01.523 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.039) 0:04:01.563 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.053) 0:04:01.617 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.109) 0:04:01.727 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.044) 0:04:01.772 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 
3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.057) 0:04:01.830 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.093) 0:04:01.923 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.065) 0:04:01.989 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.061) 0:04:02.050 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:20 -0500 (0:00:00.130) 0:04:02.180 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.072) 0:04:02.253 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.136) 0:04:02.389 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.450) 0:04:02.839 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:21 -0500 (0:00:00.045) 0:04:02.885 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.389) 0:04:03.275 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.034) 0:04:03.309 ****** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.409) 0:04:03.718 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.040) 0:04:03.759 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.035) 0:04:03.794 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.047) 0:04:03.842 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:22 -0500 (0:00:00.034) 0:04:03.877 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:33:22 -0500 (0:00:00.132) 0:04:04.009 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:33:22 -0500 (0:00:00.100) 0:04:04.109 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:33:22 -0500 (0:00:00.053) 0:04:04.163 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:33:22 -0500 (0:00:00.040) 0:04:04.204 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false }
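Note that the linger tasks above are bookkeeping only: enabling is skipped because this item is being removed, and user_quadlet_basic is merely recorded in __podman_cancel_user_linger so lingering can be cancelled once nothing of the user's remains. Lingering is what lets a user's systemd units keep running without an open login session; a minimal sketch of managing it directly (hypothetical tasks, not the role's own):

    - name: Enable lingering for the rootless user (sketch)
      ansible.builtin.command: loginctl enable-linger user_quadlet_basic
      args:
        creates: /var/lib/systemd/linger/user_quadlet_basic   # marker written by systemd-logind

    - name: Cancel lingering once no user units are left (sketch)
      ansible.builtin.command: loginctl disable-linger user_quadlet_basic
      args:
        removes: /var/lib/systemd/linger/user_quadlet_basic

The creates/removes guards keep the commands idempotent, mirroring how the log's tasks report "changed": false when there is nothing to do.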
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.052) 0:04:04.257 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.042) 0:04:04.299 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.038) 0:04:04.338 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.038) 0:04:04.376 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.039) 0:04:04.415 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.038) 0:04:04.454 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.049) 0:04:04.503 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.059) 0:04:04.563 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.058) 0:04:04.621 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.187) 0:04:04.808 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.054) 0:04:04.863 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.044) 0:04:04.907 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false }
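This item differs from the previous two: the unit text comes from a template, and __podman_quadlet_str above already holds the rendered output of templates/quadlet-basic.network.j2. Written out as the file the role manages (the unit body is reconstructed verbatim from that string; the copy wrapper is only illustrative):

    - name: Install the rendered quadlet-basic.network unit (sketch)
      ansible.builtin.copy:
        dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network
        content: |
          [Network]
          Subnet=192.168.29.0/24
          Gateway=192.168.29.1
          Label=app=wordpress
          NetworkName=quadlet-basic-name
        owner: user_quadlet_basic
        mode: "0644"

NetworkName=quadlet-basic-name makes podman create the network under that fixed name rather than one derived from the file name, and the generated service is quadlet-basic-network.service, matching the __podman_service_name computed below.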
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.055) 0:04:04.963 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.143) 0:04:05.107 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.047) 0:04:05.155 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 18 January 2025 11:33:23 -0500 (0:00:00.062) 0:04:05.217 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 18 January 2025 11:33:24 -0500 (0:00:00.059) 0:04:05.277 ******
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 18 January 2025 11:33:24 -0500 (0:00:00.433) 0:04:05.710 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003926", "end": "2025-01-18 11:33:24.807606", "rc": 0, "start": "2025-01-18 11:33:24.803680" }

STDOUT:

0: user_quadlet_basic 589824 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path:
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:24 -0500 (0:00:00.464) 0:04:06.175 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.005148", "end": "2025-01-18 11:33:25.290748", "rc": 0, "start": "2025-01-18 11:33:25.285600" } STDOUT: 0: user_quadlet_basic 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.490) 0:04:06.666 ****** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.163) 0:04:06.830 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.065) 0:04:06.895 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.046) 0:04:06.942 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.042) 0:04:06.985 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.045) 0:04:07.030 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.036) 0:04:07.067 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", 
"__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.066) 0:04:07.133 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.055) 0:04:07.189 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:25 -0500 (0:00:00.055) 0:04:07.244 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.126) 0:04:07.370 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.067) 0:04:07.438 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.211) 0:04:07.649 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.461) 0:04:08.111 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:26 -0500 (0:00:00.063) 0:04:08.174 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false 
} } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.410) 0:04:08.585 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.049) 0:04:08.635 ****** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.436) 0:04:09.071 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.074) 0:04:09.146 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:27 -0500 (0:00:00.082) 0:04:09.228 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.078) 0:04:09.306 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.064) 0:04:09.371 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.047) 0:04:09.418 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.098) 0:04:09.517 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.045) 0:04:09.563 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.047) 0:04:09.610 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.051) 0:04:09.661 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.042) 0:04:09.703 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.117) 0:04:09.820 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.045) 0:04:09.866 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.040) 0:04:09.906 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.049) 0:04:09.955 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.060) 0:04:10.016 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.047) 0:04:10.064 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:33:28 -0500 (0:00:00.055) 0:04:10.120 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_basic) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.186) 0:04:10.307 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.064) 0:04:10.371 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.090) 0:04:10.462 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.435) 0:04:10.898 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.036) 0:04:10.935 ****** skipping: 
[managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.034) 0:04:10.969 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.036) 0:04:11.006 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.035) 0:04:11.042 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.034) 0:04:11.076 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.039) 0:04:11.116 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 18 January 2025 11:33:29 -0500 (0:00:00.053) 0:04:11.170 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.168) 0:04:11.338 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.051) 0:04:11.390 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] 
******************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:309 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.085) 0:04:11.475 ****** changed: [managed-node3] => { "changed": true, "force": false, "name": "user_quadlet_basic", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:315 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.515) 0:04:11.991 ****** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.104) 0:04:12.095 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 18 January 2025 11:33:30 -0500 (0:00:00.085) 0:04:12.181 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.073) 0:04:12.255 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.053) 0:04:12.309 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.041) 0:04:12.350 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.042) 0:04:12.393 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.042) 0:04:12.435 
****** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 18 January 2025 11:33:31 -0500 (0:00:00.093) 0:04:12.529 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 18 January 2025 11:33:32 -0500 (0:00:01.219) 0:04:13.748 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.084) 0:04:13.833 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.126) 0:04:13.959 ****** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.096) 0:04:14.056 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 18 January 2025 11:33:32 -0500 (0:00:00.112) 
0:04:14.169 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.094) 0:04:14.263 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025861", "end": "2025-01-18 11:33:33.446083", "rc": 0, "start": "2025-01-18 11:33:33.420222" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.566) 0:04:14.830 ****** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.110) 0:04:14.941 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.082) 0:04:15.023 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 18 January 2025 11:33:33 -0500 (0:00:00.166) 0:04:15.192 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.136) 0:04:15.328 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.105) 0:04:15.434 ****** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host 
conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.129) 0:04:15.563 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.147) 0:04:15.710 ****** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:34 -0500 (0:00:00.450) 0:04:16.161 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.179) 0:04:16.341 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.095) 0:04:16.437 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.511) 0:04:16.948 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:35 -0500 
(0:00:00.129) 0:04:17.078 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.098) 0:04:17.177 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:35 -0500 (0:00:00.063) 0:04:17.240 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.069) 0:04:17.309 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.069) 0:04:17.379 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.044) 0:04:17.424 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.043) 0:04:17.467 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.059) 0:04:17.527 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.115) 0:04:17.642 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.126) 0:04:17.769 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.059) 0:04:17.828 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.052) 0:04:17.881 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.079) 0:04:17.960 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.066) 0:04:18.027 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 18 January 2025 11:33:36 -0500 (0:00:00.169) 0:04:18.197 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.100) 0:04:18.297 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 18 January 2025 11:33:37 -0500 
(0:00:00.064) 0:04:18.362 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.045) 0:04:18.407 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.087) 0:04:18.495 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.040) 0:04:18.536 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.043) 0:04:18.579 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.041) 0:04:18.621 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.055) 0:04:18.676 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.062) 0:04:18.739 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.054) 0:04:18.794 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle 
certs.d files - present] ******* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.043) 0:04:18.837 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.035) 0:04:18.873 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.034) 0:04:18.907 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.110) 0:04:19.018 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:33:37 -0500 (0:00:00.171) 0:04:19.189 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.105) 0:04:19.295 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.078) 0:04:19.374 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.071) 0:04:19.445 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids 
exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.086) 0:04:19.532 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.066) 0:04:19.598 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.069) 0:04:19.667 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.068) 0:04:19.736 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.068) 0:04:19.804 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.060) 0:04:19.865 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.058) 0:04:19.924 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.061) 0:04:19.985 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.039) 0:04:20.025 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.050) 0:04:20.076 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.054) 0:04:20.130 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.063) 0:04:20.194 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:38 -0500 (0:00:00.033) 0:04:20.228 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.115) 0:04:20.343 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.055) 0:04:20.399 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.087) 0:04:20.487 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.482) 0:04:20.970 ****** ok: [managed-node3] => { 
"ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.039) 0:04:21.009 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.071) 0:04:21.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.060) 0:04:21.141 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:39 -0500 (0:00:00.057) 0:04:21.198 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.058) 0:04:21.257 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.044) 0:04:21.301 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.039) 0:04:21.340 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.036) 0:04:21.377 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.035) 0:04:21.413 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.034) 0:04:21.447 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.036) 0:04:21.484 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.034) 0:04:21.519 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.039) 0:04:21.559 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.058) 0:04:21.617 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.354) 0:04:21.971 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.116) 0:04:22.088 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:33:40 -0500 (0:00:00.111) 0:04:22.200 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.092) 0:04:22.293 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.062) 0:04:22.356 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.064) 0:04:22.420 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.471) 0:04:22.891 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.053) 0:04:22.945 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:41 -0500 (0:00:00.284) 0:04:23.229 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": 
"quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.093) 0:04:23.322 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.087) 0:04:23.410 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.065) 0:04:23.475 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.085) 0:04:23.560 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.110) 0:04:23.671 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.386) 0:04:24.057 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.070) 0:04:24.127 ****** ok: [managed-node3] => { 
"ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:42 -0500 (0:00:00.084) 0:04:24.212 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.423) 0:04:24.635 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.037) 0:04:24.673 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.039) 0:04:24.712 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.039) 0:04:24.751 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.037) 0:04:24.789 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.039) 0:04:24.828 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.038) 0:04:24.867 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.039) 0:04:24.907 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.038) 0:04:24.945 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.060) 0:04:25.006 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.039) 0:04:25.046 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.035) 0:04:25.081 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 18 January 2025 11:33:43 -0500 (0:00:00.082) 0:04:25.163 ******
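Note: parts 3-5 pin the systemd scope to "system" and the quadlet directory to /etc/containers/systemd because __podman_user is root; the service name is simply the unit name with the .container suffix swapped for .service. For a rootless user, the per-user quadlet path and the user manager would be used instead; a sketch ("poduser" is hypothetical):

# system scope, as in this run
ls /etc/containers/systemd/
systemctl daemon-reload
# rootless scope: per-user directory and the user manager, with XDG_RUNTIME_DIR=/run/user/<uid>
ls ~poduser/.config/containers/systemd/
systemctl --user daemon-reload

ok: [managed-node3] => { "censored": "the output has been hidden due to the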
fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:43 -0500 (0:00:00.049) 0:04:25.213 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.166) 0:04:25.379 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.034) 0:04:25.414 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:32:51 EST", "ActiveEnterTimestampMonotonic": "544735258", "ActiveExitTimestamp": "Sat 2025-01-18 11:32:51 EST", "ActiveExitTimestampMonotonic": "544895698", "ActiveState": "failed", "After": "-.mount basic.target quadlet-basic-network.service sysinit.target systemd-journald.socket quadlet-basic-mysql-volume.service network-online.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:32:51 EST", "AssertTimestampMonotonic": "544493315", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "260967000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:32:51 EST", "ConditionTimestampMonotonic": "544493312", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "9904", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", 
"DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:32:51 EST", "ExecMainExitTimestampMonotonic": "544894650", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "61441", "ExecMainStartTimestamp": "Sat 2025-01-18 11:32:51 EST", "ExecMainStartTimestampMonotonic": "544680001", "ExecMainStatus": "1", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[Sat 2025-01-18 11:32:51 EST] ; stop_time=[Sat 2025-01-18 11:32:51 EST] ; pid=61441 ; code=exited ; status=1 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[Sat 2025-01-18 11:32:51 EST] ; stop_time=[Sat 2025-01-18 11:32:51 EST] ; pid=61441 ; code=exited ; status=1 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[Sat 2025-01-18 11:32:51 EST] ; stop_time=[Sat 2025-01-18 11:32:51 EST] ; pid=61488 ; code=exited ; status=0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; flags=ignore-failure ; start_time=[Sat 2025-01-18 11:32:51 EST] ; stop_time=[Sat 2025-01-18 11:32:51 EST] ; pid=61488 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", 
"IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-18 11:32:51 EST", "InactiveEnterTimestampMonotonic": "544935831", "InactiveExitTimestamp": "Sat 2025-01-18 11:32:51 EST", "InactiveExitTimestampMonotonic": "544502036", "InvocationID": "8d3491b3239a463cb88dfeb1abbd439b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3243524096", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "20369408", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-basic-mysql-volume.service system.slice quadlet-basic-network.service -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:32:51 EST", "StateChangeTimestampMonotonic": "544935831", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:44 -0500 (0:00:00.817) 0:04:26.232 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217972.1290865, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "ctime": 1737217969.9660866, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", 
"inode": 184549611, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217969.6850865, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 448, "uid": 0, "version": "316731645", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.429) 0:04:26.662 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.062) 0:04:26.724 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.408) 0:04:27.133 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:45 -0500 (0:00:00.090) 0:04:27.223 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.076) 0:04:27.299 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.062) 0:04:27.361 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:46 -0500 (0:00:00.430) 0:04:27.792 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 18 January 2025 11:33:47 -0500 (0:00:00.758) 0:04:28.551 ******
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 18 January 2025 11:33:47 -0500 (0:00:00.485) 0:04:29.036 ******
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 18 January 2025 11:33:47 -0500 (0:00:00.062) 0:04:29.098 ******
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 18 January 2025 11:33:47 -0500 (0:00:00.042) 0:04:29.141 ******
changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.255995", "end": "2025-01-18 11:33:48.471877", "rc": 0, "start": "2025-01-18 11:33:48.215882" }
STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 18 January 2025 11:33:48 -0500 (0:00:00.659) 0:04:29.800 ******
included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 18 January 2025 11:33:48 -0500 (0:00:00.065) 0:04:29.865 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 18 January 2025 11:33:48 -0500 (0:00:00.136) 0:04:30.002 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 18 January 2025 11:33:48 -0500 (0:00:00.045) 0:04:30.048 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
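Note: "podman image prune --all -f" removes every image not referenced by a container (the pruned image ID is printed above), which is why the "podman images -n" check below returns nothing. The linger tasks are skipped because linger only matters for rootless users; manual equivalents, as a sketch ("poduser" is hypothetical):

# what the role just ran
podman image prune --all -f
# rootless only: keep the user's systemd instance (and its quadlet units) alive after logout
loginctl enable-linger poduser
# what a later "cancel linger" step would undo
loginctl disable-linger poduser

task path: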
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:33:48 -0500 (0:00:00.041) 0:04:30.090 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031567", "end": "2025-01-18 11:33:49.199911", "rc": 0, "start": "2025-01-18 11:33:49.168344" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:33:49 -0500 (0:00:00.435) 0:04:30.525 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.029409", "end": "2025-01-18 11:33:49.629405", "rc": 0, "start": "2025-01-18 11:33:49.599996" } STDOUT: local quadlet-basic-mysql-name local systemd-quadlet-basic-unused-volume TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:33:49 -0500 (0:00:00.427) 0:04:30.953 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.029208", "end": "2025-01-18 11:33:50.069173", "rc": 0, "start": "2025-01-18 11:33:50.039965" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.442) 0:04:31.396 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031059", "end": "2025-01-18 11:33:50.507346", "rc": 0, "start": "2025-01-18 11:33:50.476287" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:33:50 -0500 (0:00:00.453) 0:04:31.849 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:33:51 -0500 (0:00:00.486) 0:04:32.336 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:33:51 -0500 (0:00:00.468) 0:04:32.804 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": 
"systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": 
"initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql-volume.service": { "name": "quadlet-basic-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-mysql.service": { "name": "quadlet-basic-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-volume-volume.service": { "name": "quadlet-basic-unused-volume-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": 
"running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": 
"sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:53 -0500 (0:00:02.237) 0:04:35.041 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:33:53 -0500 (0:00:00.058) 0:04:35.100 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:33:53 -0500 (0:00:00.076) 0:04:35.177 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.072) 0:04:35.250 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.061) 0:04:35.311 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.064) 0:04:35.376 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.085) 0:04:35.462 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.044) 0:04:35.506 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.121) 0:04:35.627 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.049) 0:04:35.676 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.410) 0:04:36.087 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.037) 0:04:36.125 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.049) 0:04:36.174 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:33:54 -0500 (0:00:00.051) 0:04:36.226 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.059) 0:04:36.286 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 
2025 11:33:55 -0500 (0:00:00.061) 0:04:36.348 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.064) 0:04:36.412 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.058) 0:04:36.471 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.065) 0:04:36.536 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.096) 0:04:36.633 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.063) 0:04:36.697 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.071) 0:04:36.768 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.144) 0:04:36.913 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.061) 0:04:36.974 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.102) 0:04:37.076 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:33:55 -0500 (0:00:00.055) 0:04:37.132 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-volume-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:32:39 EST", "ActiveEnterTimestampMonotonic": "532492793", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice -.mount basic.target sysinit.target network-online.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:32:39 EST", "AssertTimestampMonotonic": "532438082", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "37679000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:32:39 EST", "ConditionTimestampMonotonic": "532438079", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "9320", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", 
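
The "Stop and disable service" result being dumped here targets quadlet-basic-unused-volume-volume.service, a generator-produced unit: its FragmentPath below sits under /run/systemd/generator and its UnitFileState is "generated". Generated units cannot be permanently disabled, which is presumably why the task records "failed_when_result": false instead of failing. A minimal sketch of an equivalent task, with the unit name taken from this log and the failed_when handling an assumption:

    - name: Stop and disable a quadlet-generated unit (illustrative sketch)
      ansible.builtin.systemd:
        name: quadlet-basic-unused-volume-volume.service
        state: stopped
        enabled: false
      failed_when: false  # assumed: tolerate units that systemd refuses to disable
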
"ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:32:39 EST", "ExecMainExitTimestampMonotonic": "532492577", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:32:39 EST", "ExecMainHandoffTimestampMonotonic": "532451453", "ExecMainPID": "60241", "ExecMainStartTimestamp": "Sat 2025-01-18 11:32:39 EST", "ExecMainStartTimestampMonotonic": "532438797", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:32:39 EST", "InactiveExitTimestampMonotonic": "532439298", "InvocationID": "4b11e5f6cc5644518fea5d9244157297", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3280252928", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": 
"no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "14499840", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:32:39 EST", "StateChangeTimestampMonotonic": "532492793", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", 
"SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:33:56 -0500 (0:00:01.048) 0:04:38.180 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217957.8670878, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "ctime": 1737217957.8700879, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 553648347, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217957.571088, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 53, "uid": 0, "version": "5998598", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.423) 0:04:38.604 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.061) 0:04:38.665 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.385) 0:04:39.051 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.077) 0:04:39.128 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.037) 0:04:39.166 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:33:57 -0500 (0:00:00.034) 0:04:39.200 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:33:58 -0500 (0:00:00.397) 0:04:39.597 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:33:59 -0500 (0:00:00.765) 0:04:40.362 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:33:59 -0500 (0:00:00.440) 0:04:40.803 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:33:59 -0500 (0:00:00.046) 0:04:40.850 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:33:59 -0500 (0:00:00.033) 0:04:40.883 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.031737", "end": "2025-01-18 11:33:59.984538", "rc": 0, "start": "2025-01-18 11:33:59.952801" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:34:00 -0500 (0:00:00.419) 0:04:41.303 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 
January 2025 11:34:00 -0500 (0:00:00.061) 0:04:41.364 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:34:00 -0500 (0:00:00.034) 0:04:41.399 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:34:00 -0500 (0:00:00.031) 0:04:41.431 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:34:00 -0500 (0:00:00.120) 0:04:41.551 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031669", "end": "2025-01-18 11:34:00.655316", "rc": 0, "start": "2025-01-18 11:34:00.623647" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:34:00 -0500 (0:00:00.421) 0:04:41.973 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.033027", "end": "2025-01-18 11:34:01.082236", "rc": 0, "start": "2025-01-18 11:34:01.049209" } STDOUT: local quadlet-basic-mysql-name TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:34:01 -0500 (0:00:00.429) 0:04:42.402 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030889", "end": "2025-01-18 11:34:01.509352", "rc": 0, "start": "2025-01-18 11:34:01.478463" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:34:01 -0500 (0:00:00.425) 0:04:42.827 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031147", "end": "2025-01-18 11:34:01.933416", "rc": 0, "start": "2025-01-18 11:34:01.902269" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:34:02 -0500 (0:00:00.423) 0:04:43.251 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman 
: For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:34:02 -0500 (0:00:00.418) 0:04:43.669 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:34:02 -0500 (0:00:00.420) 0:04:44.090 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": 
"dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", 
"state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql-volume.service": { "name": "quadlet-basic-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-mysql.service": { "name": "quadlet-basic-mysql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, 
"sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { 
"name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": 
"systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:04 -0500 (0:00:02.150) 0:04:46.241 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.033) 0:04:46.274 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.045) 0:04:46.320 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.042) 0:04:46.362 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.036) 0:04:46.398 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.051) 0:04:46.450 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.062) 0:04:46.513 ****** skipping: [managed-node3] => { 
"changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.046) 0:04:46.560 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.039) 0:04:46.599 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.047) 0:04:46.647 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.471) 0:04:47.118 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.037) 0:04:47.156 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.036) 0:04:47.192 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:34:05 -0500 (0:00:00.037) 0:04:47.230 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.036) 0:04:47.266 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.036) 0:04:47.302 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.037) 0:04:47.340 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.035) 0:04:47.375 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.037) 0:04:47.413 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.038) 0:04:47.470 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.033) 0:04:47.508 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }
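NOTE: the "Set per-container variables" tasks that follow compute the on-disk quadlet file as __podman_quadlet_path plus the unit name and type, i.e. /etc/containers/systemd/quadlet-basic-mysql.volume. The 89-byte file itself is never printed (it is slurped under no_log during cleanup), but given the spec shown in part 0, {"Volume": {"VolumeName": "quadlet-basic-mysql-name"}}, a minimal reconstruction of its contents would be:
    [Volume]
    VolumeName=quadlet-basic-mysql-name
This is only a sketch (the role may also write a generated-file header comment); it agrees with the ExecStart line, /usr/bin/podman volume create --ignore quadlet-basic-mysql-name, in the generated service dumped further down.
TASK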
[fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.033) 0:04:47.542 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.082) 0:04:47.625 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.042) 0:04:47.667 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.076) 0:04:47.743 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:34:06 -0500 (0:00:00.034) 0:04:47.778 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:32:34 EST", "ActiveEnterTimestampMonotonic": "527222856", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target network-online.target -.mount basic.target systemd-journald.socket system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:32:33 EST", "AssertTimestampMonotonic": "527167872", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "38170000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin 
cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:32:33 EST", "ConditionTimestampMonotonic": "527167867", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "9281", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:32:34 EST", "ExecMainExitTimestampMonotonic": "527222643", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:32:33 EST", "ExecMainHandoffTimestampMonotonic": "527180479", "ExecMainPID": "59415", "ExecMainStartTimestamp": "Sat 2025-01-18 11:32:33 EST", "ExecMainStartTimestampMonotonic": "527168633", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:32:33 EST", "InactiveExitTimestampMonotonic": "527169099", "InvocationID": "96c8a04d1df0470089e31b5166aacddf", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", 
"LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3280642048", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "14450688", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": 
"system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:32:34 EST", "StateChangeTimestampMonotonic": "527222856", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:34:07 -0500 (0:00:00.810) 0:04:48.588 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217972.8380866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "ctime": 1737217952.650089, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 499122393, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217952.3670893, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 89, "uid": 0, "version": "1403006486", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:34:07 -0500 (0:00:00.481) 0:04:49.070 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:34:07 -0500 (0:00:00.061) 0:04:49.132 ****** ok: [managed-node3] => { "censored": "the 
output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:34:08 -0500 (0:00:00.377) 0:04:49.509 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:34:08 -0500 (0:00:00.051) 0:04:49.561 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:34:08 -0500 (0:00:00.042) 0:04:49.604 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:34:08 -0500 (0:00:00.036) 0:04:49.641 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:08 -0500 (0:00:00.386) 0:04:50.027 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:34:09 -0500 (0:00:00.742) 0:04:50.770 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:34:09 -0500 (0:00:00.448) 0:04:51.219 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.046) 0:04:51.265 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }
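NOTE: at the command line, the cleanup sequence above amounts to roughly the following sketch (the role drives it through Ansible modules rather than shelling out, and the exact action behind the no_log "Remove managed resource" task is assumed here to be removal of the named volume):
    systemctl stop quadlet-basic-mysql-volume.service
    rm /etc/containers/systemd/quadlet-basic-mysql.volume
    systemctl daemon-reload    # "Refresh systemd": the quadlet generator re-runs and the
                               # unit it wrote under /run/systemd/generator/ disappears
    podman volume rm quadlet-basic-mysql-name
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: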
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.036) 0:04:51.302 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030519", "end": "2025-01-18 11:34:10.403610", "rc": 0, "start": "2025-01-18 11:34:10.373091" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.418) 0:04:51.720 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.061) 0:04:51.782 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.033) 0:04:51.816 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.035) 0:04:51.851 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:34:10 -0500 (0:00:00.034) 0:04:51.886 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032851", "end": "2025-01-18 11:34:10.989831", "rc": 0, "start": "2025-01-18 11:34:10.956980" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:34:11 -0500 (0:00:00.503) 0:04:52.390 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.031035", "end": "2025-01-18 11:34:11.494646", "rc": 0, "start": "2025-01-18 11:34:11.463611" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:34:11 -0500 (0:00:00.422) 0:04:52.812 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030822", "end": "2025-01-18 11:34:11.923273", "rc": 0, "start": "2025-01-18 11:34:11.892451" }
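NOTE: the "For testing and debugging" tasks are bare listing commands; no STDOUT is shown for the three above, which apparently means no images, volumes, or containers survived the cleanup, while the networks listing that follows still shows the default podman network plus the networks left behind by the other quadlet units:
    podman images -n
    podman volume ls -n
    podman ps --noheading
    podman network ls -n -q    # next task; prints: podman quadlet-basic-name
                               # systemd-quadlet-basic-unused-network
TASK [fedora.linux_system_roles.podman : For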
testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:34:11 -0500 (0:00:00.432) 0:04:53.244 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.030341", "end": "2025-01-18 11:34:12.348037", "rc": 0, "start": "2025-01-18 11:34:12.317696" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:34:12 -0500 (0:00:00.422) 0:04:53.666 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:34:12 -0500 (0:00:00.427) 0:04:54.094 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:34:13 -0500 (0:00:00.425) 0:04:54.520 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": 
"stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": 
{ "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql.service": { "name": "quadlet-basic-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": 
"systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:15 -0500 (0:00:02.181) 0:04:56.702 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.034) 0:04:56.737 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.043) 0:04:56.780 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.043) 0:04:56.824 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.035) 0:04:56.860 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", 
"__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.050) 0:04:56.911 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.062) 0:04:56.973 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.040) 0:04:57.013 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.038) 0:04:57.052 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:34:15 -0500 (0:00:00.048) 0:04:57.101 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.392) 0:04:57.493 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.126) 0:04:57.620 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.037) 0:04:57.657 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.044) 0:04:57.702 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.037) 0:04:57.740 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.037) 0:04:57.777 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.037) 0:04:57.814 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.038) 0:04:57.853 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.036) 0:04:57.889 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": 
"/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.059) 0:04:57.949 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.036) 0:04:57.986 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.034) 0:04:58.020 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.076) 0:04:58.097 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.042) 0:04:58.139 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:34:16 -0500 (0:00:00.076) 0:04:58.215 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:34:17 -0500 (0:00:00.033) 0:04:58.249 ****** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-network-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:32:29 EST", "ActiveEnterTimestampMonotonic": "522688909", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target network-online.target system.slice basic.target systemd-journald.socket -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:32:29 EST", "AssertTimestampMonotonic": "522644389", "Before": 
"shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "36993000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:32:29 EST", "ConditionTimestampMonotonic": "522644385", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "9242", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:32:29 EST", "ExecMainExitTimestampMonotonic": "522688678", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:32:29 EST", "ExecMainHandoffTimestampMonotonic": "522655389", "ExecMainPID": "58590", "ExecMainStartTimestamp": "Sat 2025-01-18 11:32:29 EST", "ExecMainStartTimestampMonotonic": "522645189", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", 
"IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:32:29 EST", "InactiveExitTimestampMonotonic": "522645663", "InvocationID": "39e4f33cde484a019eb6ec56c7109395", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3289026560", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16523264", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", 
"RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-unused-network.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:32:29 EST", "StateChangeTimestampMonotonic": "522688909", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:34:17 -0500 (0:00:00.803) 0:04:59.053 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217948.11609, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "ctime": 1737217948.11909, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452985053, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217947.83909, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-unused-network.network", 
"pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 54, "uid": 0, "version": "781081945", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.395) 0:04:59.449 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.136) 0:04:59.585 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.380) 0:04:59.966 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.052) 0:05:00.018 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.034) 0:05:00.053 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:34:18 -0500 (0:00:00.035) 0:05:00.089 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:19 -0500 (0:00:00.387) 0:05:00.477 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:34:19 -0500 (0:00:00.743) 0:05:01.220 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK 
[fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:34:20 -0500 (0:00:00.439) 0:05:01.660 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:20 -0500 (0:00:00.045) 0:05:01.705 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:34:20 -0500 (0:00:00.041) 0:05:01.746 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.033114", "end": "2025-01-18 11:34:20.852088", "rc": 0, "start": "2025-01-18 11:34:20.818974" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:34:20 -0500 (0:00:00.422) 0:05:02.169 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:34:20 -0500 (0:00:00.062) 0:05:02.232 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:34:21 -0500 (0:00:00.034) 0:05:02.267 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:34:21 -0500 (0:00:00.035) 0:05:02.302 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:34:21 -0500 (0:00:00.033) 0:05:02.336 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031480", "end": "2025-01-18 11:34:21.442167", "rc": 0, "start": "2025-01-18 11:34:21.410687" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:34:21 -0500 (0:00:00.430) 0:05:02.766 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.031736", "end": "2025-01-18 11:34:21.868267", "rc": 0, "start": "2025-01-18 11:34:21.836531" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:34:22 -0500 (0:00:00.501) 0:05:03.268 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030555", "end": "2025-01-18 11:34:22.368109", "rc": 0, "start": "2025-01-18 11:34:22.337554" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:34:22 -0500 (0:00:00.418) 0:05:03.687 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.030038", "end": "2025-01-18 11:34:22.786134", "rc": 0, "start": "2025-01-18 11:34:22.756096" } STDOUT: podman quadlet-basic-name TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:34:22 -0500 (0:00:00.418) 0:05:04.105 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:34:23 -0500 (0:00:00.417) 0:05:04.523 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:34:23 -0500 (0:00:00.417) 0:05:04.941 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": 
"systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { 
"name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": 
"kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql.service": { "name": "quadlet-basic-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": 
"systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": 
"systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": 
"systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:25 -0500 (0:00:02.139) 0:05:07.080 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 18 January 2025 11:34:25 -0500 (0:00:00.034) 0:05:07.115 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 18 January 2025 11:34:25 -0500 (0:00:00.097) 0:05:07.213 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no 
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.044) 0:05:07.257 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.036) 0:05:07.294 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.051) 0:05:07.346 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.062) 0:05:07.408 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.040) 0:05:07.448 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.037) 0:05:07.486 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.048) 0:05:07.534 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217683.9532003, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1737217679.1402063, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0,
"version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.396) 0:05:07.931 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.035) 0:05:07.966 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.124) 0:05:08.091 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.037) 0:05:08.128 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.038) 0:05:08.167 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.036) 0:05:08.203 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 18 January 2025 11:34:26 -0500 (0:00:00.038) 0:05:08.241 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.038) 0:05:08.279 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.036) 0:05:08.316 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.059) 0:05:08.376 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.037) 0:05:08.413 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.035) 0:05:08.449 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.077) 0:05:08.527 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.043) 0:05:08.570 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.079) 0:05:08.649 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 18 January 2025 11:34:27 -0500 (0:00:00.034) 0:05:08.684 ****** changed: 
[managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-18 11:32:25 EST", "ActiveEnterTimestampMonotonic": "518244945", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target network-online.target system.slice -.mount systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-18 11:32:24 EST", "AssertTimestampMonotonic": "518197102", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "35288000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-18 11:32:24 EST", "ConditionTimestampMonotonic": "518197098", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "9203", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698237440", "EffectiveMemoryMax": "3698237440", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-18 11:32:25 EST", "ExecMainExitTimestampMonotonic": "518244630", "ExecMainHandoffTimestamp": "Sat 2025-01-18 11:32:25 EST", "ExecMainHandoffTimestampMonotonic": "518210371", "ExecMainPID": "57765", "ExecMainStartTimestamp": "Sat 2025-01-18 11:32:24 EST", "ExecMainStartTimestampMonotonic": "518197908", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.29.0/24 --gateway 192.168.29.1 --label app=wordpress quadlet-basic-name ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-18 11:32:24 EST", "InactiveExitTimestampMonotonic": "518198466", "InvocationID": "f377deead68444848138e6d160b4ac9d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3270307840", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16470016", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": 
"no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-18 11:32:25 EST", "StateChangeTimestampMonotonic": "518244945", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 18 January 2025 11:34:28 -0500 (0:00:00.802) 0:05:09.486 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1737217972.4860866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "ctime": 1737217943.6040912, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 390070492, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1737217943.3220913, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 105, "uid": 0, "version": "2210524528", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 18 January 2025 11:34:28 -0500 (0:00:00.401) 0:05:09.888 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 18 January 2025 11:34:28 -0500 (0:00:00.152) 0:05:10.040 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 18 January 2025 11:34:29 -0500 (0:00:00.379) 0:05:10.420 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 18 January 2025 11:34:29 -0500 (0:00:00.053) 0:05:10.473 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 18 January 2025 11:34:29 -0500 (0:00:00.035) 0:05:10.508 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 18 January 2025 11:34:29 -0500 (0:00:00.034) 0:05:10.543 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: 
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 18 January 2025 11:34:29 -0500 (0:00:00.390) 0:05:10.934 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 18 January 2025 11:34:30 -0500 (0:00:00.743) 0:05:11.678 ****** changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 18 January 2025 11:34:30 -0500 (0:00:00.435) 0:05:12.113 ****** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:30 -0500 (0:00:00.046) 0:05:12.159 ****** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 18 January 2025 11:34:30 -0500 (0:00:00.036) 0:05:12.196 ****** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.031788", "end": "2025-01-18 11:34:31.302903", "rc": 0, "start": "2025-01-18 11:34:31.271115" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.424) 0:05:12.621 ****** included: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.061) 0:05:12.682 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.033) 0:05:12.716 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.034) 0:05:12.751 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
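
Note: the cleanup tasks above correspond roughly to this manual sequence (reconstructed from the logged results; only the prune command is logged verbatim, the "Remove managed resource" output is censored by no_log, so the network-removal command is an assumption based on the network name in the ExecStart logged earlier, and "Refresh systemd" is assumed to be a daemon reload):

    systemctl stop quadlet-basic-network.service      # "Stop and disable service" (the task also sets enabled: false)
    rm /etc/containers/systemd/quadlet-basic.network  # "Remove quadlet file"
    systemctl daemon-reload                           # "Refresh systemd" (assumed)
    podman network rm quadlet-basic-name              # "Remove managed resource" (assumed; output censored)
    podman image prune --all -f                       # "Prune images no longer in use" (logged verbatim)
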
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.032) 0:05:12.783 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032270", "end": "2025-01-18 11:34:31.887895", "rc": 0, "start": "2025-01-18 11:34:31.855625" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 18 January 2025 11:34:31 -0500 (0:00:00.422) 0:05:13.205 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032607", "end": "2025-01-18 11:34:32.310721", "rc": 0, "start": "2025-01-18 11:34:32.278114" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 18 January 2025 11:34:32 -0500 (0:00:00.423) 0:05:13.629 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.031279", "end": "2025-01-18 11:34:32.732894", "rc": 0, "start": "2025-01-18 11:34:32.701615" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 18 January 2025 11:34:32 -0500 (0:00:00.420) 0:05:14.049 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.030578", "end": "2025-01-18 11:34:33.151609", "rc": 0, "start": "2025-01-18 11:34:33.121031" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 18 January 2025 11:34:33 -0500 (0:00:00.514) 0:05:14.563 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 18 January 2025 11:34:33 -0500 (0:00:00.424) 0:05:14.987 ****** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 18 January 2025 11:34:34 -0500 (0:00:00.421) 0:05:15.409 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, 
"nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql.service": { "name": "quadlet-basic-mysql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": 
"systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": 
"systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": 
"systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 18 January 2025 11:34:36 -0500 (0:00:02.153) 0:05:17.562 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 18 January 2025 11:34:36 -0500 (0:00:00.034) 0:05:17.597 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 18 January 2025 11:34:36 -0500 
(0:00:00.030) 0:05:17.627 ******
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 18 January 2025 11:34:36 -0500 (0:00:00.031) 0:05:17.659 ******
skipping: [managed-node3] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:325
Saturday 18 January 2025 11:34:36 -0500 (0:00:00.048) 0:05:17.707 ******
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

PLAY RECAP *********************************************************************
managed-node3              : ok=973  changed=79   unreachable=0    failed=1    skipped=1082 rescued=1    ignored=1

TASKS RECAP ********************************************************************
Saturday 18 January 2025 11:34:36 -0500 (0:00:00.036) 0:05:17.744 ******
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present --- 7.84s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 7.28s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : For testing and debugging - services --- 4.21s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.08s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.02s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.24s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.19s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.18s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.15s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.15s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.14s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Check files ------------------------------------------------------------- 2.13s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:201
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.00s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Manage each secret ------------------- 1.68s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 1.67s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.22s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.20s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Gathering Facts --------------------------------------------------------- 1.19s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
Check files ------------------------------------------------------------- 1.13s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:253
fedora.linux_system_roles.podman : Start service ------------------------ 1.07s
/tmp/collections-PRc/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
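
A note on the cleanup-time checks recorded above: the "For testing and debugging" tasks are plain read-only podman listing commands (their exact argv arrays appear as "cmd" in the task results), and the large services dump has the per-unit name/source/state/status shape produced by Ansible's service_facts module. The following is a minimal standalone sketch that reproduces those checks; the file name debug-podman-state.yml, the loop structure, and the registered variable are illustrative assumptions, not the role's actual source (the real tasks live at the task paths shown in the log, e.g. roles/podman/tasks/cleanup_quadlet_spec.yml).

# debug-podman-state.yml -- hypothetical helper playbook, not part of the role
- hosts: all
  become: true  # the log shows rootful podman (__podman_rootless was false)
  tasks:
    - name: For testing and debugging - list podman resources
      # Each argv matches a "cmd" array printed earlier in this log;
      # -n / --noheading suppresses the header row, so empty stdout
      # means no resources of that kind remain after cleanup.
      ansible.builtin.command:
        argv: "{{ item }}"
      loop:
        - [podman, images, -n]
        - [podman, volume, ls, -n]
        - [podman, ps, --noheading]
        - [podman, network, ls, -n, -q]
      register: podman_listings
      changed_when: false  # listing commands never modify state

    - name: For testing and debugging - services
      # Populates ansible_facts.services with per-unit name, source,
      # state, and status -- the same shape as the dump above.
      ansible.builtin.service_facts:

Run with, e.g., "ansible-playbook -i inventory debug-podman-state.yml". In the log above, only the network listing produced output ("podman", the default network), which is consistent with the cleanup having removed all test images, volumes, and containers.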