ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-9ny
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Oct 29 2024, 00:00:00) [GCC 14.2.1 20240801 (Red Hat 14.2.1-2)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_auth_and_security.yml ******************************************
2 plays in /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:5
Saturday 14 December 2024  11:30:04 -0500 (0:00:00.008)       0:00:00.008 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-olA/tests/vars/vault-variables.yml"
    ],
    "changed": false
}
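
Note: the values above are standard Ansible Vault payloads loaded from the test's vault-variables.yml. As an illustration only (the password value below is a placeholder, not the real test secret), an equivalent entry could be generated with:

    ansible-vault encrypt_string 'example-password' --name '__podman_test_password'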

PLAY [Test various aspects of authentication and security settings] ************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:9
Saturday 14 December 2024  11:30:04 -0500 (0:00:00.036)       0:00:00.044 ***** 
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-
core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node1]
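
Note: the interpreter-discovery warning above can be avoided by pinning the interpreter explicitly. A minimal sketch for an INI inventory (the host name is taken from this run; the variable itself is standard Ansible):

    managed-node1 ansible_python_interpreter=/usr/bin/python3.12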

TASK [Run the role with no config to install podman] ***************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:46
Saturday 14 December 2024  11:30:07 -0500 (0:00:03.180)       0:00:03.224 ***** 
included: fedora.linux_system_roles.podman for managed-node1
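
Note: a minimal sketch of what a test task like the one above might look like; only the task name and role name are taken from this run, the actual test playbook is not reproduced here:

    - name: Run the role with no config to install podman
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.podman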

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:30:07 -0500 (0:00:00.063)       0:00:03.288 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:30:07 -0500 (0:00:00.022)       0:00:03.311 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:30:07 -0500 (0:00:00.040)       0:00:03.351 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:30:08 -0500 (0:00:00.524)       0:00:03.876 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:30:08 -0500 (0:00:00.036)       0:00:03.912 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:30:08 -0500 (0:00:00.375)       0:00:04.287 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:30:08 -0500 (0:00:00.026)       0:00:04.313 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:30:08 -0500 (0:00:00.065)       0:00:04.379 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:30:09 -0500 (0:00:01.132)       0:00:05.511 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:30:09 -0500 (0:00:00.132)       0:00:05.643 ***** 
changed: [managed-node1] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: shadow-utils-subid-2:4.15.0-5.el10.x86_64",
        "Installed: conmon-2:2.1.12-3.el10.x86_64",
        "Installed: catatonit-5:0.2.0-1.el10.x86_64",
        "Installed: container-selinux-3:2.234.2-1.el10.noarch",
        "Installed: containers-common-5:0.60.2-13.el10.noarch",
        "Installed: containers-common-extra-5:0.60.2-13.el10.noarch",
        "Installed: protobuf-c-1.5.0-6.el10.x86_64",
        "Installed: passt-0^20241121.g238c69f-1.el10.x86_64",
        "Installed: libnet-1.3-7.el10.x86_64",
        "Installed: passt-selinux-0^20241121.g238c69f-1.el10.noarch",
        "Installed: podman-6:5.3.1-3.el10.x86_64",
        "Installed: gpgme-1.23.2-6.el10.x86_64",
        "Installed: netavark-2:1.13.1-1.el10.x86_64",
        "Installed: criu-3.19-6.el10.x86_64",
        "Installed: criu-libs-3.19-6.el10.x86_64",
        "Installed: crun-1.19-1.el10.x86_64",
        "Installed: aardvark-dns-2:1.13.1-1.el10.x86_64"
    ]
}
lsrpackages: iptables-nft podman shadow-utils-subid

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:31:18 -0500 (0:01:08.369)       0:01:14.013 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.046)       0:01:14.060 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.046)       0:01:14.106 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.046)       0:01:14.153 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.032229",
    "end": "2024-12-14 11:31:18.856214",
    "rc": 0,
    "start": "2024-12-14 11:31:18.823985"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:31:18-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.538)       0:01:14.691 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.032)       0:01:14.723 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}
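
Note: the false_condition above uses Jinja's version test. A minimal illustrative guard built from that condition (not the role's actual source):

    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: podman 4.2 or later is required
      when: podman_version is version("4.2", "<")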

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:31:18 -0500 (0:00:00.030)       0:01:14.754 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.048)       0:01:14.803 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.053)       0:01:14.857 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.054)       0:01:14.911 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.040)       0:01:14.952 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.087)       0:01:15.039 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "getent_passwd": {
            "root": [
                "x",
                "0",
                "0",
                "Super User",
                "/root",
                "/bin/bash"
            ]
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.509)       0:01:15.548 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.031)       0:01:15.580 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:31:19 -0500 (0:00:00.044)       0:01:15.625 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1730678400.0,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.374)       0:01:15.999 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.029)       0:01:16.029 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}
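
Note: these checks are skipped because the podman user is root; for a rootless user they would query subordinate ID ranges with getsubids. Illustrative commands (user name is a placeholder):

    getsubids someuser       # list subuid ranges for the user
    getsubids -g someuser    # list subgid ranges for the user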

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.029)       0:01:16.058 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.047)       0:01:16.105 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.044)       0:01:16.149 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.045)       0:01:16.194 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.045)       0:01:16.240 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.047)       0:01:16.287 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.045)       0:01:16.333 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.062)       0:01:16.395 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.090)       0:01:16.485 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.050)       0:01:16.536 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.033)       0:01:16.569 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.061)       0:01:16.630 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.065)       0:01:16.696 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 14 December 2024  11:31:20 -0500 (0:00:00.029)       0:01:16.725 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.053)       0:01:16.779 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.030)       0:01:16.809 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.029)       0:01:16.838 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.063)       0:01:16.902 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.053)       0:01:16.956 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.071)       0:01:17.027 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.055)       0:01:17.083 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.051)       0:01:17.135 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_firewall | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.051)       0:01:17.186 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.048)       0:01:17.235 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.048)       0:01:17.283 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.041)       0:01:17.324 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.043)       0:01:17.368 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.041)       0:01:17.409 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.043)       0:01:17.453 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.041)       0:01:17.495 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.041)       0:01:17.536 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.031)       0:01:17.567 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Setup registry] **********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:50
Saturday 14 December 2024  11:31:21 -0500 (0:00:00.110)       0:01:17.678 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml for managed-node1

TASK [Create a temporary directory] ********************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:5
Saturday 14 December 2024  11:31:22 -0500 (0:00:00.091)       0:01:17.769 ***** 
changed: [managed-node1] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/tmp/lsr_6ehua9m0_podman",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [Set authdir] *************************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:12
Saturday 14 December 2024  11:31:22 -0500 (0:00:00.449)       0:01:18.219 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_registry_authdir": "/tmp/lsr_6ehua9m0_podman/auth",
        "__podman_test_authfile": "/tmp/lsr_6ehua9m0_podman/auth/auth.json"
    },
    "changed": false
}

TASK [Create authdir] **********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:17
Saturday 14 December 2024  11:31:22 -0500 (0:00:00.038)       0:01:18.257 ***** 
changed: [managed-node1] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/tmp/lsr_6ehua9m0_podman/auth",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [Generate certificates for registry] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:23
Saturday 14 December 2024  11:31:22 -0500 (0:00:00.496)       0:01:18.754 ***** 
included: fedora.linux_system_roles.certificate for managed-node1

TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.105)       0:01:18.859 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.075)       0:01:18.935 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.059)       0:01:18.995 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.423)       0:01:19.418 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__certificate_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.058)       0:01:19.477 ***** 
skipping: [managed-node1] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__certificate_certmonger_packages": [
            "certmonger",
            "python3-packaging"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__certificate_certmonger_packages": [
            "certmonger",
            "python3-packaging"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Saturday 14 December 2024  11:31:23 -0500 (0:00:00.103)       0:01:19.580 ***** 
changed: [managed-node1] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: python3-cffi-1.16.0-7.el10.x86_64",
        "Installed: python3-cryptography-43.0.0-4.el10.x86_64",
        "Installed: python3-ply-3.11-25.el10.noarch",
        "Installed: python3-pycparser-2.20-16.el10.noarch",
        "Installed: python3-pyasn1-0.6.1-1.el10.noarch"
    ]
}
lsrpackages: python3-cryptography python3-dbus python3-pyasn1

TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Saturday 14 December 2024  11:31:26 -0500 (0:00:02.312)       0:01:21.892 ***** 
changed: [managed-node1] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: certmonger-0.79.20-3.el10.x86_64",
        "Installed: python3-packaging-23.2-6.el10.noarch",
        "Installed: nspr-4.35.0-34.el10.x86_64",
        "Installed: nss-3.101.0-13.el10.x86_64",
        "Installed: dbus-tools-1:1.14.10-5.el10.x86_64",
        "Installed: nss-softokn-3.101.0-13.el10.x86_64",
        "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64",
        "Installed: nss-sysinit-3.101.0-13.el10.x86_64",
        "Installed: nss-util-3.101.0-13.el10.x86_64"
    ]
}
lsrpackages: certmonger python3-packaging

TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Saturday 14 December 2024  11:31:29 -0500 (0:00:02.895)       0:01:24.788 ***** 
changed: [managed-node1] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/etc/certmonger//pre-scripts",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Saturday 14 December 2024  11:31:29 -0500 (0:00:00.433)       0:01:25.222 ***** 
changed: [managed-node1] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/etc/certmonger//post-scripts",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
Saturday 14 December 2024  11:31:29 -0500 (0:00:00.413)       0:01:25.635 ***** 
changed: [managed-node1] => (item=certmonger) => {
    "__certificate_provider": "certmonger",
    "ansible_loop_var": "__certificate_provider",
    "changed": true,
    "enabled": true,
    "name": "certmonger",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "systemd-journald.socket system.slice sysinit.target syslog.target dbus-broker.service dbus.socket basic.target network.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedorahosted.certmonger",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "Certificate monitoring and PKI enrollment",
        "DevicePolicy": "auto",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698233344",
        "EffectiveMemoryMax": "3698233344",
        "EffectiveTasksMax": "22361",
        "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/certmonger.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "certmonger.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13976",
        "LimitNPROCSoft": "13976",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13976",
        "LimitSIGPENDINGSoft": "13976",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3220156416",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "certmonger.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "PIDFile": "/run/certmonger.pid",
        "PartOf": "dbus-broker.service",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "system.slice sysinit.target dbus.socket",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22361",
        "TimeoutAbortUSec": "1min 30s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 30s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
Saturday 14 December 2024  11:31:31 -0500 (0:00:01.348)       0:01:26.984 ***** 
changed: [managed-node1] => (item={'name': 'podman_registry', 'dns': ['localhost', '127.0.0.1'], 'ca': 'self-sign'}) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "ca": "self-sign",
        "dns": [
            "localhost",
            "127.0.0.1"
        ],
        "name": "podman_registry"
    }
}

MSG:

Certificate requested (new).
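
Note: the loop item above corresponds to a certificate_requests entry for the fedora.linux_system_roles.certificate role. A sketch reconstructed from the item shown in this run:

    certificate_requests:
      - name: podman_registry
        dns:
          - localhost
          - 127.0.0.1
        ca: self-sign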

TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
Saturday 14 December 2024  11:31:32 -0500 (0:00:01.003)       0:01:27.987 ***** 
ok: [managed-node1] => (item=['cert', {'name': 'podman_registry', 'dns': ['localhost', '127.0.0.1'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURqVENDQW5XZ0F3SUJBZ0lRRG1PYzZEZlFRMUt0THp6a21rUXc4akFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTUdVMgpNemxqWlRndE16ZGtNRFF6TlRJdFlXUXlaak5qWlRRdE9XRTBORE13WmpFd0hoY05NalF4TWpFME1UWXpNVE15CldoY05NalV4TWpFME1UWXpNVE14V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzF2Wm9qa3FiUTcxOXl2TXNaL3ArSjh3QllZSk5DRDhERAo5V3RJQ0JkTnFTVG8velBJd25PeXVaUkRCQUJoNUVuOGdxbG1sZXZhSWFVSW9ERy82OTdlLzhxRURKd3ljeG1yCkV4a3JOR0xPNEFQM1V6YWxIZjlyanc4YytrUUxpdzFLdWM3aE5Vd0NJR0pMNVFCdVF5M2NDNmxXbXpUb2dVNWYKY0RiZnVEZGdSbnIwSFhqa1ZkbVVXQkxOVnNFQ2U1Kys0OWRzaStENzdjQlBqVjIveE5hYVpFRmkzcU9mWnVuYQpGWklYNWRLblUwZC9wMmRnSzl2MlJFc1FzWmhpeVdFKzNIekNja3ovVldlQnVOOU5PNlRIdStCWjZiZk8wUy9OCkhzbWx6aDFhTm1ISmc5NVdLSks4djViTUxpQkFOa0pRMzZwSjlhbzdwWEk5OGVDU2RET2ZBZ01CQUFHamdaNHcKZ1pzd0N3WURWUjBQQkFRREFnV2dNQjhHQTFVZEVRUVlNQmFDQ1d4dlkyRnNhRzl6ZElJSk1USTNMakF1TUM0eApNQjBHQTFVZEpRUVdNQlFHQ0NzR0FRVUZCd01CQmdnckJnRUZCUWNEQWpBTUJnTlZIUk1CQWY4RUFqQUFNQjBHCkExVWREZ1FXQkJRZVlpQ1dVUjNJUTVIaXpOa052ZUtKeHh0TzF6QWZCZ05WSFNNRUdEQVdnQlFPMmFuVEpqVXgKZmJEM1pHb001SEQwUWU1R1pUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFhZ1hBTy9tdTY5dHROSE5ublhNSApPSlJCaDhEclh4S0ZqQUUyM0I0RzlyS05PM0NmMUxNdW12ZTZjcFBaZHIrdDJFYTZPb3A4YXRqU050czV4WTdWCldiejBLditJdHNmMmlKRXZ0b053VWpzUWNJMDd6U0ErT0phR1lCaVl3WllSVDdGMmZrMTNGRW5GV3lFaVZrZXUKYjNhOVpQM3RUczFweVl0OXNoRkxZdWd0UGFrdDFmT3Q0aHZGVG9malJSRlVVU1FKUlBnRUQvR0ZDMDcyRkZQTwpJWWw2VjdsM0kwNUNHdUhQUjNDdzhRclVWa3NFYmtoaDB0YzRiK00zMlI2TmNPL1F3T3N2TG1Ba0dPTXVjK0xBCllnMVB0SmdhZnJ1a2pEUlcxZVpVUVZPZ1dnRFMrSnpqK1cvNUF6eE5lbElDWjhzUUVuM0VxL2dJN014TWxjalEKTGc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==",
    "encoding": "base64",
    "item": [
        "cert",
        {
            "ca": "self-sign",
            "dns": [
                "localhost",
                "127.0.0.1"
            ],
            "name": "podman_registry"
        }
    ],
    "source": "/etc/pki/tls/certs/podman_registry.crt"
}
ok: [managed-node1] => (item=['key', {'name': 'podman_registry', 'dns': ['localhost', '127.0.0.1'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRQzF2Wm9qa3FiUTcxOXkKdk1zWi9wK0o4d0JZWUpOQ0Q4REQ5V3RJQ0JkTnFTVG8velBJd25PeXVaUkRCQUJoNUVuOGdxbG1sZXZhSWFVSQpvREcvNjk3ZS84cUVESnd5Y3htckV4a3JOR0xPNEFQM1V6YWxIZjlyanc4YytrUUxpdzFLdWM3aE5Vd0NJR0pMCjVRQnVReTNjQzZsV216VG9nVTVmY0RiZnVEZGdSbnIwSFhqa1ZkbVVXQkxOVnNFQ2U1Kys0OWRzaStENzdjQlAKalYyL3hOYWFaRUZpM3FPZlp1bmFGWklYNWRLblUwZC9wMmRnSzl2MlJFc1FzWmhpeVdFKzNIekNja3ovVldlQgp1TjlOTzZUSHUrQlo2YmZPMFMvTkhzbWx6aDFhTm1ISmc5NVdLSks4djViTUxpQkFOa0pRMzZwSjlhbzdwWEk5CjhlQ1NkRE9mQWdNQkFBRUNnZ0VBUmMrM3dudG1iZkxOZU44UnE0SVdHYUVKSEdtNGwxTkN5bU5mR3ZQVXM4WEsKQW1CcHJYQVlqbXBnQytBaEgzbFlmRkZhcmlmZnlkSkx1VVVENTBPOVFHOEVoWFhzbUNxRGpTWVFjSHEyaTk0YwpWK2lVb0plbDBsVEpTdW5Oc3B0dTl5bnAwTE91NTFBMlFnNE0vTTM4Y3o3ZGhRR29zTGxlK0NVOFJ2TFYvcnBlClZJZHNsTG5KUDl0SUxCQ1lEVks4ckNFdDRxSGluZEhCTVRVTW5aendjbmxPMWVqWDZLZU53YmFHd2xGY1B2bTIKbTdnL21OT0dVUVBCZVh2WnhNM1VSV25NK04yV3d2VGRCenBEdzBVV0xGZTI0UW5HeVNEdVg3MHFpVm9WVmVPTQpXaVNwMllwVURPRFpNQnpiNkYvd3NjTlBZSU4ycXNmZ29JQm1FOVVGUlFLQmdRRGZ3WVVvTXgvYjAxUEptM0VWCmxrRGxDb2NtWG9LNjhMdG04YTNheVJqcUE0MTRWYzN1RXU2OHgzSnRiaklCc3prbjQrYlppVHZvSEVzYW4zMDEKaTBadUpvSjY2VVFwUnkrS3dNblo0dkpEdzhKd2JTWEFHWHpqS1RNUnBOMU1LRXNaalRnVmdwSjd5c2loT3M4QwpxS0xuSnhVYytWN2RFcUpVZ0NYWlJXMDU0d0tCZ1FEUDdoMDlDZ0RJTm03YUZGK3Z2dFdpVU1TbzF4V3Q3Tms2ClNSUi93amdlWnkzbWM5b1dqNm16T1pKUzdhNWNJaXhYSmE1ZVhxNnRHRkx5T2NlNDg1ZWZXSDgrQkVxS1ViYk8KZlp5ZDJUT2d4VnJNV1RpNjUxUUNjTGwwb084S2I4c3RGbDdaK1hnYStGSkVKanhBT3dEa0dPS2ErOWVTTm9hKwpzcWdVaWVIOEZRS0JnQmRCNmpGUGZ4UStWd3N4bVFPQlpxVm4vcnhDUHB4bndWTGw5QWcxdHQwOEdVeFV2dDdzCklOeUdicUpwUGRGbmE1MTExdHp0b3NoeGlHenYvdmU5emtoTXdHcytLOU9PanFhQmUzbW1XVUFwN04vNnVGM0wKU0d2a01HYTVHRGQrU0xBVE9hMGlzVThrdU0zcGF2TC96RTd5aFFOSitFNUtUWTB5OUx3c3R6bTVBb0dCQUlKWApaMml0N3BPUzI2WkZBejV0eUFsNXkrMXk5cG9TVUpHaTRnQzhST1Y2RkdIVDJiQXUybGdKRE5xRE43dDFpSW9RCml0K0pSc0ltNzBmOFJHQ0NBbGZXNTl4K1lDT1kyWmxUVUd2NmpjeUlLa1RTNFNiNGY4Y1AxcEZ2bUtqWUwyclMKRFI5aXJKRnJmRlpRQ1pEUjY5c1FabXgwMWt0SGR1dnZua0ZSdGFHSkFvR0JBS0Z2MDBZWS9uL01WaEpiZ25IUgowMWJmV1c4T2s3cnlnWExJUEgyd29DUjNOSzFNKy85QVcxUFBuanZDaUw2R0dTNDJxd0hyZ0d1akc0RmJrbEhkClQzYkR2Z3U1dGl5ZS9xK0paa2FJcHlKY280dWRXUWYwRmRaK3A2M3A1aldxNzlJbHdGdU5ZaWhieW41VHlFQzcKSGtqNkgrZFlSZEFSUUtHQmxLZ25qZXB2Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
    "encoding": "base64",
    "item": [
        "key",
        {
            "ca": "self-sign",
            "dns": [
                "localhost",
                "127.0.0.1"
            ],
            "name": "podman_registry"
        }
    ],
    "source": "/etc/pki/tls/private/podman_registry.key"
}
ok: [managed-node1] => (item=['ca', {'name': 'podman_registry', 'dns': ['localhost', '127.0.0.1'], 'ca': 'self-sign'}]) => {
    "ansible_loop_var": "item",
    "changed": false,
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURqVENDQW5XZ0F3SUJBZ0lRRG1PYzZEZlFRMUt0THp6a21rUXc4akFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTUdVMgpNemxqWlRndE16ZGtNRFF6TlRJdFlXUXlaak5qWlRRdE9XRTBORE13WmpFd0hoY05NalF4TWpFME1UWXpNVE15CldoY05NalV4TWpFME1UWXpNVE14V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzF2Wm9qa3FiUTcxOXl2TXNaL3ArSjh3QllZSk5DRDhERAo5V3RJQ0JkTnFTVG8velBJd25PeXVaUkRCQUJoNUVuOGdxbG1sZXZhSWFVSW9ERy82OTdlLzhxRURKd3ljeG1yCkV4a3JOR0xPNEFQM1V6YWxIZjlyanc4YytrUUxpdzFLdWM3aE5Vd0NJR0pMNVFCdVF5M2NDNmxXbXpUb2dVNWYKY0RiZnVEZGdSbnIwSFhqa1ZkbVVXQkxOVnNFQ2U1Kys0OWRzaStENzdjQlBqVjIveE5hYVpFRmkzcU9mWnVuYQpGWklYNWRLblUwZC9wMmRnSzl2MlJFc1FzWmhpeVdFKzNIekNja3ovVldlQnVOOU5PNlRIdStCWjZiZk8wUy9OCkhzbWx6aDFhTm1ISmc5NVdLSks4djViTUxpQkFOa0pRMzZwSjlhbzdwWEk5OGVDU2RET2ZBZ01CQUFHamdaNHcKZ1pzd0N3WURWUjBQQkFRREFnV2dNQjhHQTFVZEVRUVlNQmFDQ1d4dlkyRnNhRzl6ZElJSk1USTNMakF1TUM0eApNQjBHQTFVZEpRUVdNQlFHQ0NzR0FRVUZCd01CQmdnckJnRUZCUWNEQWpBTUJnTlZIUk1CQWY4RUFqQUFNQjBHCkExVWREZ1FXQkJRZVlpQ1dVUjNJUTVIaXpOa052ZUtKeHh0TzF6QWZCZ05WSFNNRUdEQVdnQlFPMmFuVEpqVXgKZmJEM1pHb001SEQwUWU1R1pUQU5CZ2txaGtpRzl3MEJBUXNGQUFPQ0FRRUFhZ1hBTy9tdTY5dHROSE5ublhNSApPSlJCaDhEclh4S0ZqQUUyM0I0RzlyS05PM0NmMUxNdW12ZTZjcFBaZHIrdDJFYTZPb3A4YXRqU050czV4WTdWCldiejBLditJdHNmMmlKRXZ0b053VWpzUWNJMDd6U0ErT0phR1lCaVl3WllSVDdGMmZrMTNGRW5GV3lFaVZrZXUKYjNhOVpQM3RUczFweVl0OXNoRkxZdWd0UGFrdDFmT3Q0aHZGVG9malJSRlVVU1FKUlBnRUQvR0ZDMDcyRkZQTwpJWWw2VjdsM0kwNUNHdUhQUjNDdzhRclVWa3NFYmtoaDB0YzRiK00zMlI2TmNPL1F3T3N2TG1Ba0dPTXVjK0xBCllnMVB0SmdhZnJ1a2pEUlcxZVpVUVZPZ1dnRFMrSnpqK1cvNUF6eE5lbElDWjhzUUVuM0VxL2dJN014TWxjalEKTGc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==",
    "encoding": "base64",
    "item": [
        "ca",
        {
            "ca": "self-sign",
            "dns": [
                "localhost",
                "127.0.0.1"
            ],
            "name": "podman_registry"
        }
    ],
    "source": "/etc/pki/tls/certs/podman_registry.crt"
}
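
Note that slurp returns file contents base64-encoded, so the follow-up tasks below pass the content through b64decode before writing it out; a sketch assuming a register name of __certificates (the destination path and mode are taken from the "Write cert for registry" result further down):

    - name: Write cert for registry
      copy:
        content: "{{ (__certificates.results | first).content | b64decode }}"
        dest: /tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt
        mode: "0600"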

TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160
Saturday 14 December 2024  11:31:33 -0500 (0:00:01.382)       0:01:29.370 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "certificate_test_certs": {
            "podman_registry": {
                "ca": "/etc/pki/tls/certs/podman_registry.crt",
                "ca_content": "-----BEGIN CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n",
                "cert": "/etc/pki/tls/certs/podman_registry.crt",
                "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n",
                "key": "/etc/pki/tls/private/podman_registry.key",
                "key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC1vZojkqbQ719y\nvMsZ/p+J8wBYYJNCD8DD9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUI\noDG/697e/8qEDJwycxmrExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL\n5QBuQy3cC6lWmzTogU5fcDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBP\njV2/xNaaZEFi3qOfZunaFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeB\nuN9NO6THu+BZ6bfO0S/NHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI9\n8eCSdDOfAgMBAAECggEARc+3wntmbfLNeN8Rq4IWGaEJHGm4l1NCymNfGvPUs8XK\nAmBprXAYjmpgC+AhH3lYfFFariffydJLuUUD50O9QG8EhXXsmCqDjSYQcHq2i94c\nV+iUoJel0lTJSunNsptu9ynp0LOu51A2Qg4M/M38cz7dhQGosLle+CU8RvLV/rpe\nVIdslLnJP9tILBCYDVK8rCEt4qHindHBMTUMnZzwcnlO1ejX6KeNwbaGwlFcPvm2\nm7g/mNOGUQPBeXvZxM3URWnM+N2WwvTdBzpDw0UWLFe24QnGySDuX70qiVoVVeOM\nWiSp2YpUDODZMBzb6F/wscNPYIN2qsfgoIBmE9UFRQKBgQDfwYUoMx/b01PJm3EV\nlkDlCocmXoK68Ltm8a3ayRjqA414Vc3uEu68x3JtbjIBszkn4+bZiTvoHEsan301\ni0ZuJoJ66UQpRy+KwMnZ4vJDw8JwbSXAGXzjKTMRpN1MKEsZjTgVgpJ7ysihOs8C\nqKLnJxUc+V7dEqJUgCXZRW054wKBgQDP7h09CgDINm7aFF+vvtWiUMSo1xWt7Nk6\nSRR/wjgeZy3mc9oWj6mzOZJS7a5cIixXJa5eXq6tGFLyOce485efWH8+BEqKUbbO\nfZyd2TOgxVrMWTi651QCcLl0oO8Kb8stFl7Z+Xga+FJEJjxAOwDkGOKa+9eSNoa+\nsqgUieH8FQKBgBdB6jFPfxQ+VwsxmQOBZqVn/rxCPpxnwVLl9Ag1tt08GUxUvt7s\nINyGbqJpPdFna5111tztoshxiGzv/ve9zkhMwGs+K9OOjqaBe3mmWUAp7N/6uF3L\nSGvkMGa5GDd+SLATOa0isU8kuM3pavL/zE7yhQNJ+E5KTY0y9Lwstzm5AoGBAIJX\nZ2it7pOS26ZFAz5tyAl5y+1y9poSUJGi4gC8ROV6FGHT2bAu2lgJDNqDN7t1iIoQ\nit+JRsIm70f8RGCCAlfW59x+YCOY2ZlTUGv6jcyIKkTS4Sb4f8cP1pFvmKjYL2rS\nDR9irJFrfFZQCZDR69sQZmx01ktHduvvnkFRtaGJAoGBAKFv00YY/n/MVhJbgnHR\n01bfWW8Ok7rygXLIPH2woCR3NK1M+/9AW1PPnjvCiL6GGS42qwHrgGujG4FbklHd\nT3bDvgu5tiye/q+JZkaIpyJco4udWQf0FdZ+p63p5jWq79IlwFuNYihbyn5TyEC7\nHkj6H+dYRdARQKGBlKgnjepv\n-----END PRIVATE KEY-----\n"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176
Saturday 14 December 2024  11:31:33 -0500 (0:00:00.066)       0:01:29.436 ***** 
ok: [managed-node1] => (item={'cert': '/etc/pki/tls/certs/podman_registry.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/podman_registry.key', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC1vZojkqbQ719y\nvMsZ/p+J8wBYYJNCD8DD9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUI\noDG/697e/8qEDJwycxmrExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL\n5QBuQy3cC6lWmzTogU5fcDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBP\njV2/xNaaZEFi3qOfZunaFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeB\nuN9NO6THu+BZ6bfO0S/NHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI9\n8eCSdDOfAgMBAAECggEARc+3wntmbfLNeN8Rq4IWGaEJHGm4l1NCymNfGvPUs8XK\nAmBprXAYjmpgC+AhH3lYfFFariffydJLuUUD50O9QG8EhXXsmCqDjSYQcHq2i94c\nV+iUoJel0lTJSunNsptu9ynp0LOu51A2Qg4M/M38cz7dhQGosLle+CU8RvLV/rpe\nVIdslLnJP9tILBCYDVK8rCEt4qHindHBMTUMnZzwcnlO1ejX6KeNwbaGwlFcPvm2\nm7g/mNOGUQPBeXvZxM3URWnM+N2WwvTdBzpDw0UWLFe24QnGySDuX70qiVoVVeOM\nWiSp2YpUDODZMBzb6F/wscNPYIN2qsfgoIBmE9UFRQKBgQDfwYUoMx/b01PJm3EV\nlkDlCocmXoK68Ltm8a3ayRjqA414Vc3uEu68x3JtbjIBszkn4+bZiTvoHEsan301\ni0ZuJoJ66UQpRy+KwMnZ4vJDw8JwbSXAGXzjKTMRpN1MKEsZjTgVgpJ7ysihOs8C\nqKLnJxUc+V7dEqJUgCXZRW054wKBgQDP7h09CgDINm7aFF+vvtWiUMSo1xWt7Nk6\nSRR/wjgeZy3mc9oWj6mzOZJS7a5cIixXJa5eXq6tGFLyOce485efWH8+BEqKUbbO\nfZyd2TOgxVrMWTi651QCcLl0oO8Kb8stFl7Z+Xga+FJEJjxAOwDkGOKa+9eSNoa+\nsqgUieH8FQKBgBdB6jFPfxQ+VwsxmQOBZqVn/rxCPpxnwVLl9Ag1tt08GUxUvt7s\nINyGbqJpPdFna5111tztoshxiGzv/ve9zkhMwGs+K9OOjqaBe3mmWUAp7N/6uF3L\nSGvkMGa5GDd+SLATOa0isU8kuM3pavL/zE7yhQNJ+E5KTY0y9Lwstzm5AoGBAIJX\nZ2it7pOS26ZFAz5tyAl5y+1y9poSUJGi4gC8ROV6FGHT2bAu2lgJDNqDN7t1iIoQ\nit+JRsIm70f8RGCCAlfW59x+YCOY2ZlTUGv6jcyIKkTS4Sb4f8cP1pFvmKjYL2rS\nDR9irJFrfFZQCZDR69sQZmx01ktHduvvnkFRtaGJAoGBAKFv00YY/n/MVhJbgnHR\n01bfWW8Ok7rygXLIPH2woCR3NK1M+/9AW1PPnjvCiL6GGS42qwHrgGujG4FbklHd\nT3bDvgu5tiye/q+JZkaIpyJco4udWQf0FdZ+p63p5jWq79IlwFuNYihbyn5TyEC7\nHkj6H+dYRdARQKGBlKgnjepv\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/podman_registry.crt', 'ca_content': '-----BEGIN 
CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n'}) => {
    "ansible_loop_var": "item",
    "changed": false,
    "cmd": [
        "getcert",
        "stop-tracking",
        "-f",
        "/etc/pki/tls/certs/podman_registry.crt"
    ],
    "delta": "0:00:00.025375",
    "end": "2024-12-14 11:31:34.050132",
    "item": {
        "ca": "/etc/pki/tls/certs/podman_registry.crt",
        "ca_content": "-----BEGIN CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n",
        "cert": "/etc/pki/tls/certs/podman_registry.crt",
        "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDjTCCAnWgAwIBAgIQDmOc6DfQQ1KtLzzkmkQw8jANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMGU2\nMzljZTgtMzdkMDQzNTItYWQyZjNjZTQtOWE0NDMwZjEwHhcNMjQxMjE0MTYzMTMy\nWhcNMjUxMjE0MTYzMTMxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1vZojkqbQ719yvMsZ/p+J8wBYYJNCD8DD\n9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUIoDG/697e/8qEDJwycxmr\nExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL5QBuQy3cC6lWmzTogU5f\ncDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBPjV2/xNaaZEFi3qOfZuna\nFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeBuN9NO6THu+BZ6bfO0S/N\nHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI98eCSdDOfAgMBAAGjgZ4w\ngZswCwYDVR0PBAQDAgWgMB8GA1UdEQQYMBaCCWxvY2FsaG9zdIIJMTI3LjAuMC4x\nMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G\nA1UdDgQWBBQeYiCWUR3IQ5HizNkNveKJxxtO1zAfBgNVHSMEGDAWgBQO2anTJjUx\nfbD3ZGoM5HD0Qe5GZTANBgkqhkiG9w0BAQsFAAOCAQEAagXAO/mu69ttNHNnnXMH\nOJRBh8DrXxKFjAE23B4G9rKNO3Cf1LMumve6cpPZdr+t2Ea6Oop8atjSNts5xY7V\nWbz0Kv+Itsf2iJEvtoNwUjsQcI07zSA+OJaGYBiYwZYRT7F2fk13FEnFWyEiVkeu\nb3a9ZP3tTs1pyYt9shFLYugtPakt1fOt4hvFTofjRRFUUSQJRPgED/GFC072FFPO\nIYl6V7l3I05CGuHPR3Cw8QrUVksEbkhh0tc4b+M32R6NcO/QwOsvLmAkGOMuc+LA\nYg1PtJgafrukjDRW1eZUQVOgWgDS+Jzj+W/5AzxNelICZ8sQEn3Eq/gI7MxMlcjQ\nLg==\n-----END CERTIFICATE-----\n",
        "key": "/etc/pki/tls/private/podman_registry.key",
        "key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC1vZojkqbQ719y\nvMsZ/p+J8wBYYJNCD8DD9WtICBdNqSTo/zPIwnOyuZRDBABh5En8gqlmlevaIaUI\noDG/697e/8qEDJwycxmrExkrNGLO4AP3UzalHf9rjw8c+kQLiw1Kuc7hNUwCIGJL\n5QBuQy3cC6lWmzTogU5fcDbfuDdgRnr0HXjkVdmUWBLNVsECe5++49dsi+D77cBP\njV2/xNaaZEFi3qOfZunaFZIX5dKnU0d/p2dgK9v2REsQsZhiyWE+3HzCckz/VWeB\nuN9NO6THu+BZ6bfO0S/NHsmlzh1aNmHJg95WKJK8v5bMLiBANkJQ36pJ9ao7pXI9\n8eCSdDOfAgMBAAECggEARc+3wntmbfLNeN8Rq4IWGaEJHGm4l1NCymNfGvPUs8XK\nAmBprXAYjmpgC+AhH3lYfFFariffydJLuUUD50O9QG8EhXXsmCqDjSYQcHq2i94c\nV+iUoJel0lTJSunNsptu9ynp0LOu51A2Qg4M/M38cz7dhQGosLle+CU8RvLV/rpe\nVIdslLnJP9tILBCYDVK8rCEt4qHindHBMTUMnZzwcnlO1ejX6KeNwbaGwlFcPvm2\nm7g/mNOGUQPBeXvZxM3URWnM+N2WwvTdBzpDw0UWLFe24QnGySDuX70qiVoVVeOM\nWiSp2YpUDODZMBzb6F/wscNPYIN2qsfgoIBmE9UFRQKBgQDfwYUoMx/b01PJm3EV\nlkDlCocmXoK68Ltm8a3ayRjqA414Vc3uEu68x3JtbjIBszkn4+bZiTvoHEsan301\ni0ZuJoJ66UQpRy+KwMnZ4vJDw8JwbSXAGXzjKTMRpN1MKEsZjTgVgpJ7ysihOs8C\nqKLnJxUc+V7dEqJUgCXZRW054wKBgQDP7h09CgDINm7aFF+vvtWiUMSo1xWt7Nk6\nSRR/wjgeZy3mc9oWj6mzOZJS7a5cIixXJa5eXq6tGFLyOce485efWH8+BEqKUbbO\nfZyd2TOgxVrMWTi651QCcLl0oO8Kb8stFl7Z+Xga+FJEJjxAOwDkGOKa+9eSNoa+\nsqgUieH8FQKBgBdB6jFPfxQ+VwsxmQOBZqVn/rxCPpxnwVLl9Ag1tt08GUxUvt7s\nINyGbqJpPdFna5111tztoshxiGzv/ve9zkhMwGs+K9OOjqaBe3mmWUAp7N/6uF3L\nSGvkMGa5GDd+SLATOa0isU8kuM3pavL/zE7yhQNJ+E5KTY0y9Lwstzm5AoGBAIJX\nZ2it7pOS26ZFAz5tyAl5y+1y9poSUJGi4gC8ROV6FGHT2bAu2lgJDNqDN7t1iIoQ\nit+JRsIm70f8RGCCAlfW59x+YCOY2ZlTUGv6jcyIKkTS4Sb4f8cP1pFvmKjYL2rS\nDR9irJFrfFZQCZDR69sQZmx01ktHduvvnkFRtaGJAoGBAKFv00YY/n/MVhJbgnHR\n01bfWW8Ok7rygXLIPH2woCR3NK1M+/9AW1PPnjvCiL6GGS42qwHrgGujG4FbklHd\nT3bDvgu5tiye/q+JZkaIpyJco4udWQf0FdZ+p63p5jWq79IlwFuNYihbyn5TyEC7\nHkj6H+dYRdARQKGBlKgnjepv\n-----END PRIVATE KEY-----\n"
    },
    "rc": 0,
    "start": "2024-12-14 11:31:34.024757"
}

STDOUT:

Request "20241214163131" removed.

TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Saturday 14 December 2024  11:31:34 -0500 (0:00:00.479)       0:01:29.916 ***** 
changed: [managed-node1] => (item=/etc/pki/tls/certs/podman_registry.crt) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/certs/podman_registry.crt",
    "path": "/etc/pki/tls/certs/podman_registry.crt",
    "state": "absent"
}
changed: [managed-node1] => (item=/etc/pki/tls/private/podman_registry.key) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/private/podman_registry.key",
    "path": "/etc/pki/tls/private/podman_registry.key",
    "state": "absent"
}
ok: [managed-node1] => (item=/etc/pki/tls/certs/podman_registry.crt) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/etc/pki/tls/certs/podman_registry.crt",
    "path": "/etc/pki/tls/certs/podman_registry.crt",
    "state": "absent"
}

TASK [Write cert for registry] *************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:34
Saturday 14 December 2024  11:31:35 -0500 (0:00:01.201)       0:01:31.118 ***** 
changed: [managed-node1] => {
    "changed": true,
    "checksum": "a56753cb72985d5015d277aab9534d583f3099c2",
    "dest": "/tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt",
    "gid": 0,
    "group": "root",
    "md5sum": "78891be3edc7a947b70845136b822329",
    "mode": "0600",
    "owner": "root",
    "secontext": "unconfined_u:object_r:admin_home_t:s0",
    "size": 1294,
    "src": "/root/.ansible/tmp/ansible-tmp-1734193895.4098308-8404-275762393983029/.source.crt",
    "state": "file",
    "uid": 0
}

TASK [Write key for registry] **************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:40
Saturday 14 December 2024  11:31:36 -0500 (0:00:00.832)       0:01:31.950 ***** 
changed: [managed-node1] => {
    "changed": true,
    "checksum": "3c4bd2383044d864f778448dd3788c2bdf7f63a0",
    "dest": "/tmp/lsr_6ehua9m0_podman/auth/registry_key.pem",
    "gid": 0,
    "group": "root",
    "md5sum": "84e9e0cbc8809936e72aa68134aa538e",
    "mode": "0600",
    "owner": "root",
    "secontext": "unconfined_u:object_r:admin_home_t:s0",
    "size": 1704,
    "src": "/root/.ansible/tmp/ansible-tmp-1734193896.2446203-8452-271283031401177/.source.pem",
    "state": "file",
    "uid": 0
}

TASK [Write CA cert for registry] **********************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:46
Saturday 14 December 2024  11:31:36 -0500 (0:00:00.731)       0:01:32.682 ***** 
changed: [managed-node1] => {
    "changed": true,
    "checksum": "a56753cb72985d5015d277aab9534d583f3099c2",
    "dest": "/tmp/lsr_6ehua9m0_podman/auth/ca.crt",
    "gid": 0,
    "group": "root",
    "md5sum": "78891be3edc7a947b70845136b822329",
    "mode": "0600",
    "owner": "root",
    "secontext": "unconfined_u:object_r:admin_home_t:s0",
    "size": 1294,
    "src": "/root/.ansible/tmp/ansible-tmp-1734193896.9596615-8488-86842030413265/.source.crt",
    "state": "file",
    "uid": 0
}

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:55
Saturday 14 December 2024  11:31:37 -0500 (0:00:00.710)       0:01:33.393 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:60
Saturday 14 December 2024  11:31:37 -0500 (0:00:00.034)       0:01:33.427 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:64
Saturday 14 December 2024  11:31:37 -0500 (0:00:00.041)       0:01:33.468 ***** 
changed: [managed-node1] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: httpd-tools-2.4.62-5.el10.x86_64",
        "Installed: apr-1.7.5-2.el10.x86_64",
        "Installed: apr-util-1.6.3-21.el10.x86_64",
        "Installed: apr-util-lmdb-1.6.3-21.el10.x86_64",
        "Installed: apr-util-openssl-1.6.3-21.el10.x86_64",
        "Installed: skopeo-2:1.17.0-1.el10.x86_64"
    ]
}
lsrpackages: httpd-tools skopeo

TASK [Write user and password] *************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:70
Saturday 14 December 2024  11:31:42 -0500 (0:00:04.748)       0:01:38.217 ***** 
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [Create auth.json file] ***************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:78
Saturday 14 December 2024  11:31:42 -0500 (0:00:00.410)       0:01:38.627 ***** 
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [Set paths for cleanup] ***************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:94
Saturday 14 December 2024  11:31:43 -0500 (0:00:00.723)       0:01:39.350 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cleanup_paths": [
            "/tmp/lsr_6ehua9m0_podman"
        ]
    },
    "changed": false
}

TASK [Start registry] **********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:102
Saturday 14 December 2024  11:31:43 -0500 (0:00:00.033)       0:01:39.384 ***** 
changed: [managed-node1] => {
    "changed": true,
    "cmd": [
        "podman",
        "run",
        "-d",
        "-p",
        "127.0.0.1:5000:5000",
        "--name",
        "podman_registry",
        "-v",
        "/tmp/lsr_6ehua9m0_podman/auth:/auth:Z",
        "-e",
        "REGISTRY_AUTH=htpasswd",
        "-e",
        "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm",
        "-e",
        "REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd",
        "-e",
        "REGISTRY_HTTP_TLS_CERTIFICATE=/auth/registry_cert.crt",
        "-e",
        "REGISTRY_HTTP_TLS_KEY=/auth/registry_key.pem",
        "quay.io/libpod/registry:2.8.2"
    ],
    "delta": "0:00:02.132292",
    "end": "2024-12-14 11:31:46.071477",
    "rc": 0,
    "start": "2024-12-14 11:31:43.939185"
}

STDOUT:

013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a


STDERR:

time="2024-12-14T11:31:43-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""
Trying to pull quay.io/libpod/registry:2.8.2...
Getting image source signatures
Copying blob sha256:7264a8db6415046d36d16ba98b79778e18accee6ffa71850405994cffa9be7de
Copying blob sha256:7417fa3c6d923a722787ff60825c3c40f74621ab2e4abcc120e25b10a35e4811
Copying blob sha256:88b450dec42ebc9659f10c6fd6d2326706fec4ab7aadf0c38973d930c23c546a
Copying blob sha256:c4d48a809fc2256f8aa0aeee47998488d64409855adba00a7cb3007ab9f3286e
Copying blob sha256:121f958bea53668d782691e643e9401ea21dd36c9d81078b51964d2e82b51376
Copying config sha256:0030ba3d620c647159c935ee778991c68ef3e51a274703753b0bc530104ef5e5
Writing manifest to image destination

TASK [Wait for port] ***********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:113
Saturday 14 December 2024  11:31:46 -0500 (0:00:02.554)       0:01:41.939 ***** 
ok: [managed-node1] => {
    "changed": false,
    "elapsed": 0,
    "match_groupdict": {},
    "match_groups": [],
    "path": null,
    "port": 5000,
    "search_regex": null,
    "state": "started"
}

TASK [Wait for readiness] ******************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:117
Saturday 14 December 2024  11:31:46 -0500 (0:00:00.598)       0:01:42.537 ***** 
ok: [managed-node1] => {
    "attempts": 1,
    "changed": false,
    "cmd": [
        "podman",
        "logs",
        "podman_registry"
    ],
    "delta": "0:00:00.033865",
    "end": "2024-12-14 11:31:47.110673",
    "rc": 0,
    "start": "2024-12-14 11:31:47.076808"
}

STDERR:

time="2024-12-14T11:31:47-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""
time="2024-12-14T16:31:46.070266612Z" level=info msg="Starting upload purge in 20m0s" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.070370235Z" level=warning msg="No HTTP secret provided - generated random secret. This may cause problems with uploads if multiple registries are behind a load-balancer. To provide a shared secret, fill in http.secret in the configuration file or set the REGISTRY_HTTP_SECRET environment variable." go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.070392745Z" level=info msg="redis not configured" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.070524918Z" level=info msg="using inmemory blob descriptor cache" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.070940613Z" level=info msg="restricting TLS version to tls1.2 or higher" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.070983087Z" level=info msg="restricting TLS cipher suites to: TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_256_GCM_SHA384" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
time="2024-12-14T16:31:46.071150861Z" level=info msg="listening on [::]:5000, tls" go.version=go1.19.9 instance.id=43eef437-7ea9-4821-8814-55fd249ec2bb service=registry version=2.8.2 
2024/12/14 16:31:46 http: TLS handshake error from 10.88.0.1:38216: EOF
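
The "attempts": 1 field in the result above indicates this task runs inside a retry loop around podman logs; a sketch of that pattern (the until condition, retries, and delay here are assumptions):

    - name: Wait for readiness
      command: podman logs podman_registry
      register: __registry_logs
      until: "'listening on' in __registry_logs.stderr"
      retries: 60
      delay: 1
      changed_when: false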

TASK [Convert test image names into local registry names] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:123
Saturday 14 December 2024  11:31:47 -0500 (0:00:00.442)       0:01:42.979 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_local_test_images": [
            "localhost:5000/libpod/testimage:20210610"
        ]
    },
    "changed": false
}

TASK [Push test images into local registry] ************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:127
Saturday 14 December 2024  11:31:47 -0500 (0:00:00.072)       0:01:43.052 ***** 
changed: [managed-node1] => (item={'key': 'quay.io/libpod/testimage:20210610', 'value': 'localhost:5000/libpod/testimage:20210610'}) => {
    "ansible_loop_var": "item",
    "changed": true,
    "cmd": "podman pull quay.io/libpod/testimage:20210610; podman push --authfile=\"/tmp/lsr_6ehua9m0_podman/auth/auth.json\" --cert-dir=\"/tmp/lsr_6ehua9m0_podman/auth\" quay.io/libpod/testimage:20210610 docker://localhost:5000/libpod/testimage:20210610",
    "delta": "0:00:02.860934",
    "end": "2024-12-14 11:31:50.486208",
    "item": {
        "key": "quay.io/libpod/testimage:20210610",
        "value": "localhost:5000/libpod/testimage:20210610"
    },
    "rc": 0,
    "start": "2024-12-14 11:31:47.625274"
}

STDOUT:

9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f


STDERR:

time="2024-12-14T11:31:47-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""
Trying to pull quay.io/libpod/testimage:20210610...
Getting image source signatures
Copying blob sha256:9afcdfe780b4ea44cc52d22e3f93ccf212388a90370773571ce034a62e14174e
Copying config sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
Writing manifest to image destination
time="2024-12-14T11:31:49-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""
Getting image source signatures
Copying blob sha256:f36118df491fbfd96093731809941d7bb881136415ccc114bc26d6bf10499a0e
Copying config sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
Writing manifest to image destination

TASK [Verify test images in local registry] ************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:136
Saturday 14 December 2024  11:31:50 -0500 (0:00:03.291)       0:01:46.343 ***** 
ok: [managed-node1] => (item=localhost:5000/libpod/testimage:20210610) => {
    "ansible_loop_var": "item",
    "changed": false,
    "cmd": [
        "skopeo",
        "inspect",
        "--authfile=/tmp/lsr_6ehua9m0_podman/auth/auth.json",
        "--cert-dir=/tmp/lsr_6ehua9m0_podman/auth",
        "docker://localhost:5000/libpod/testimage:20210610"
    ],
    "delta": "0:00:00.171219",
    "end": "2024-12-14 11:31:51.116442",
    "item": "localhost:5000/libpod/testimage:20210610",
    "rc": 0,
    "start": "2024-12-14 11:31:50.945223"
}

STDOUT:

{
    "Name": "localhost:5000/libpod/testimage",
    "Digest": "sha256:df2d1c92b4b221ea8e761c23a170c048f5f441aebbf57ea9ae0a0c617dd24930",
    "RepoTags": [
        "20210610"
    ],
    "Created": "2021-06-10T18:55:43.049643585Z",
    "DockerVersion": "",
    "Labels": {
        "created_at": "2021-06-10T18:55:36Z",
        "created_by": "test/system/build-testimage",
        "io.buildah.version": "1.21.0"
    },
    "Architecture": "amd64",
    "Os": "linux",
    "Layers": [
        "sha256:f95d0ccea02baa28cb140faa6b05969d1836c75b6bb0c4891463362deec40628"
    ],
    "LayersData": [
        {
            "MIMEType": "application/vnd.oci.image.layer.v1.tar+zstd",
            "Digest": "sha256:f95d0ccea02baa28cb140faa6b05969d1836c75b6bb0c4891463362deec40628",
            "Size": 4828626,
            "Annotations": {
                "io.github.containers.zstd-chunked.manifest-checksum": "sha256:e08e64782bcda1f45c478a8196d49e5c8cefefbc8d3630566c27349ad1270b8a",
                "io.github.containers.zstd-chunked.manifest-position": "4793731:18610:106412:1",
                "io.github.containers.zstd-chunked.tarsplit-position": "4812349:16205:433844"
            }
        }
    ],
    "Env": [
        "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    ]
}

TASK [Run the role with no credentials and no cert checking] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:58
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.620)       0:01:46.964 ***** 
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.175)       0:01:47.139 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.084)       0:01:47.224 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.059)       0:01:47.283 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.054)       0:01:47.338 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.082)       0:01:47.421 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.106)       0:01:47.527 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:31:51 -0500 (0:00:00.079)       0:01:47.607 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
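
The loop above layers distribution- and version-specific vars files on top of each other; CentOS_10.yml appears twice presumably because both the major-version and full-version lookups resolve to the same filename. A sketch of the pattern, with the __vars_file name taken from the skip condition shown for CentOS.yml (the exact loop expression is an assumption):

    - name: Set platform/version specific variables
      include_vars: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file
      loop:
        - RedHat.yml
        - CentOS.yml
        - CentOS_10.yml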

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:31:52 -0500 (0:00:00.198)       0:01:47.805 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:31:52 -0500 (0:00:00.788)       0:01:48.594 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:31:52 -0500 (0:00:00.034)       0:01:48.628 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:31:52 -0500 (0:00:00.051)       0:01:48.680 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:31:52 -0500 (0:00:00.056)       0:01:48.737 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.068)       0:01:48.805 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.081)       0:01:48.887 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.024226",
    "end": "2024-12-14 11:31:53.539218",
    "rc": 0,
    "start": "2024-12-14 11:31:53.514992"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:31:53-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.552)       0:01:49.439 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.084)       0:01:49.523 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}
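
The skip condition above is the version gate itself; written out as a task it amounts to the following (the failure message is an assumption):

    - name: Podman package version must be 4.2 or later
      fail:
        msg: "podman {{ podman_version }} is too old; version 4.2 or later is required"
      when: podman_version is version("4.2", "<")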

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.075)       0:01:49.599 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:31:53 -0500 (0:00:00.075)       0:01:49.675 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.132)       0:01:49.807 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.081)       0:01:49.888 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.147)       0:01:50.036 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.122)       0:01:50.159 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.060)       0:01:50.220 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.075)       0:01:50.296 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:31:54 -0500 (0:00:00.070)       0:01:50.366 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.439)       0:01:50.805 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.054)       0:01:50.860 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.083)       0:01:50.944 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.106)       0:01:51.050 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.091)       0:01:51.142 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.063)       0:01:51.206 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.060)       0:01:51.266 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.060)       0:01:51.327 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.055)       0:01:51.382 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.090)       0:01:51.473 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.091)       0:01:51.564 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.103)       0:01:51.667 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 14 December 2024  11:31:55 -0500 (0:00:00.075)       0:01:51.743 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.107)       0:01:51.850 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.044)       0:01:51.894 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.042)       0:01:51.937 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.061)       0:01:51.999 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.031)       0:01:52.030 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.031)       0:01:52.061 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.093)       0:01:52.154 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.033)       0:01:52.188 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.037)       0:01:52.225 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.033)       0:01:52.259 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.031)       0:01:52.291 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_firewall | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.030)       0:01:52.321 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.031)       0:01:52.352 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.030)       0:01:52.383 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.027)       0:01:52.410 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.029)       0:01:52.440 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.089)       0:01:52.529 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.145)       0:01:52.675 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_spec": {
            "state": "started"
        },
        "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n    labels:\n        app: test\n        io.containers.autoupdate: registry\n    name: auth_test_1_kube\nspec:\n    containers:\n    -   image: localhost:5000/libpod/testimage:20210610\n        name: auth_test_1_kube\n"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21
Saturday 14 December 2024  11:31:56 -0500 (0:00:00.082)       0:01:52.758 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_kube": {
            "apiVersion": "v1",
            "kind": "Pod",
            "metadata": {
                "labels": {
                    "app": "test",
                    "io.containers.autoupdate": "registry"
                },
                "name": "auth_test_1_kube"
            },
            "spec": {
                "containers": [
                    {
                        "image": "localhost:5000/libpod/testimage:20210610",
                        "name": "auth_test_1_kube"
                    }
                ]
            }
        },
        "__podman_kube_file": "",
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.075)       0:01:52.834 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_name": "auth_test_1_kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.063)       0:01:52.897 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.106)       0:01:53.003 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.056)       0:01:53.060 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.059)       0:01:53.120 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.103)       0:01:53.223 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.405)       0:01:53.628 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.038)       0:01:53.667 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.049)       0:01:53.717 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:31:57 -0500 (0:00:00.047)       0:01:53.764 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.048)       0:01:53.812 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.033)       0:01:53.846 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.037)       0:01:53.884 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.041)       0:01:53.926 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.041)       0:01:53.967 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_kube",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.048)       0:01:54.016 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.076)       0:01:54.093 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_path": "/etc/containers/ansible-kubernetes.d"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.082)       0:01:54.175 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.039)       0:01:54.215 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.077)       0:01:54.293 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "systemd-escape",
        "--template",
        "podman-kube@.service",
        "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    ],
    "delta": "0:00:00.005597",
    "end": "2024-12-14 11:31:58.858847",
    "rc": 0,
    "start": "2024-12-14 11:31:58.853250"
}

STDOUT:

podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service

TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.405)       0:01:54.699 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update containers and services] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87
Saturday 14 December 2024  11:31:58 -0500 (0:00:00.034)       0:01:54.733 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.069)       0:01:54.803 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.052)       0:01:54.855 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.035)       0:01:54.891 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.035)       0:01:54.927 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the host mount volumes] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.048)       0:01:54.975 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.040)       0:01:55.016 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29
Saturday 14 December 2024  11:31:59 -0500 (0:00:00.063)       0:01:55.079 ***** 
failed: [managed-node1] (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
fatal: [managed-node1]: FAILED! => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Check error] *************************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:73
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.925)       0:01:56.004 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not expected_msg in ansible_failed_result.results[0].msg | d(\"\")",
    "skip_reason": "Conditional result was False"
}

TASK [Run the role with credentials and cert checking] *************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:84
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.050)       0:01:56.055 ***** 
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.118)       0:01:56.173 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.089)       0:01:56.263 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.154)       0:01:56.417 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.090)       0:01:56.508 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.060)       0:01:56.568 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.059)       0:01:56.628 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:32:00 -0500 (0:00:00.060)       0:01:56.688 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:32:01 -0500 (0:00:00.161)       0:01:56.850 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:32:01 -0500 (0:00:00.891)       0:01:57.741 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.036)       0:01:57.777 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.045)       0:01:57.823 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.048)       0:01:57.871 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.037)       0:01:57.909 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.037)       0:01:57.946 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.024798",
    "end": "2024-12-14 11:32:02.515056",
    "rc": 0,
    "start": "2024-12-14 11:32:02.490258"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:32:02-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.402)       0:01:58.349 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.035)       0:01:58.384 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.035)       0:01:58.420 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.098)       0:01:58.518 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:32:02 -0500 (0:00:00.160)       0:01:58.679 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.298)       0:01:58.977 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.365)       0:01:59.343 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.101)       0:01:59.445 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.061)       0:01:59.507 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.062)       0:01:59.569 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:03 -0500 (0:00:00.066)       0:01:59.636 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.423)       0:02:00.060 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.050)       0:02:00.110 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.051)       0:02:00.162 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.051)       0:02:00.213 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.053)       0:02:00.267 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.051)       0:02:00.318 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.049)       0:02:00.368 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.059)       0:02:00.428 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.057)       0:02:00.486 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.078)       0:02:00.565 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 14 December 2024  11:32:04 -0500 (0:00:00.191)       0:02:00.756 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.111)       0:02:00.867 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.076)       0:02:00.943 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.158)       0:02:01.102 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.043)       0:02:01.145 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.032)       0:02:01.177 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.057)       0:02:01.234 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.031)       0:02:01.266 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.030)       0:02:01.297 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.058)       0:02:01.355 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.034)       0:02:01.390 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.050)       0:02:01.440 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.052)       0:02:01.493 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.051)       0:02:01.545 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_firewall | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.067)       0:02:01.613 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.048)       0:02:01.661 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 14 December 2024  11:32:05 -0500 (0:00:00.065)       0:02:01.727 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.045)       0:02:01.773 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.044)       0:02:01.817 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.069)       0:02:01.886 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.206)       0:02:02.093 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_spec": {
            "state": "started"
        },
        "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n    labels:\n        app: test\n        io.containers.autoupdate: registry\n    name: auth_test_1_kube\nspec:\n    containers:\n    -   image: localhost:5000/libpod/testimage:20210610\n        name: auth_test_1_kube\n"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.130)       0:02:02.224 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_kube": {
            "apiVersion": "v1",
            "kind": "Pod",
            "metadata": {
                "labels": {
                    "app": "test",
                    "io.containers.autoupdate": "registry"
                },
                "name": "auth_test_1_kube"
            },
            "spec": {
                "containers": [
                    {
                        "image": "localhost:5000/libpod/testimage:20210610",
                        "name": "auth_test_1_kube"
                    }
                ]
            }
        },
        "__podman_kube_file": "",
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.070)       0:02:02.295 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_name": "auth_test_1_kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.054)       0:02:02.349 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.090)       0:02:02.440 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.041)       0:02:02.482 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.042)       0:02:02.524 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:06 -0500 (0:00:00.053)       0:02:02.578 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.444)       0:02:03.022 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.056)       0:02:03.079 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.052)       0:02:03.131 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.054)       0:02:03.186 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.052)       0:02:03.238 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.052)       0:02:03.290 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.050)       0:02:03.340 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.037)       0:02:03.378 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}
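
All of the subuid/subgid lookups above are skipped because this kube spec runs as root. A rough sketch of the skip pattern, with the conditions copied verbatim from the skip output; the stat path and register name also appear above, while the getsubids invocations themselves are assumptions:

- name: See if getsubids exists
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __podman_stat_getsubids

- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}   # exact invocation is an assumption
  changed_when: false
  when: __podman_user not in ["root", "0"]

- name: Check with getsubids for user subgids
  ansible.builtin.command: getsubids -g {{ __podman_user }}   # -g flag is an assumption
  changed_when: false
  when: __podman_user not in ["root", "0"]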

TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.032)       0:02:03.411 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_kube",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.039)       0:02:03.450 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.054)       0:02:03.505 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_path": "/etc/containers/ansible-kubernetes.d"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.036)       0:02:03.541 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    },
    "changed": false
}
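
The kube file path is the kube directory joined with the container name plus a .yml suffix. A minimal sketch of how that fact could be composed; the expression is an assumption, consistent with the __podman_kube_path and __podman_kube_name values set above:

- name: Set per-container variables part 5
  ansible.builtin.set_fact:
    __podman_kube_file: "{{ __podman_kube_path ~ '/' ~ __podman_kube_name ~ '.yml' }}"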

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.035)       0:02:03.577 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75
Saturday 14 December 2024  11:32:07 -0500 (0:00:00.084)       0:02:03.661 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "systemd-escape",
        "--template",
        "podman-kube@.service",
        "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    ],
    "delta": "0:00:00.005231",
    "end": "2024-12-14 11:32:08.215356",
    "rc": 0,
    "start": "2024-12-14 11:32:08.210125"
}

STDOUT:

podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service
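
The unit name above comes from template-escaping the kube file path into the podman-kube@.service template. A sketch of an equivalent task, with the arguments copied from the cmd shown above; the register name and changed_when handling are assumptions:

- name: Get service name using systemd-escape
  ansible.builtin.command:
    argv:
      - systemd-escape
      - --template
      - podman-kube@.service
      - /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
  register: __podman_service_name   # register name is an assumption
  changed_when: false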

TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.394)       0:02:04.056 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update containers and services] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.049)       0:02:04.106 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.079)       0:02:04.186 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.053)       0:02:04.239 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.030)       0:02:04.269 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.031)       0:02:04.301 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the host mount volumes] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.030)       0:02:04.332 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.031)       0:02:04.364 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29
Saturday 14 December 2024  11:32:08 -0500 (0:00:00.036)       0:02:04.400 ***** 
failed: [managed-node1] (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
fatal: [managed-node1]: FAILED! => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Check error] *************************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:99
Saturday 14 December 2024  11:32:09 -0500 (0:00:00.665)       0:02:05.065 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not expected_msg in ansible_failed_result.results[0].msg | d(\"\")",
    "skip_reason": "Conditional result was False"
}
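
The image pull fails (details censored by no_log), and the test then verifies that the failure message matches the expected one; the "Check error" task is skipped precisely because the expected message was found. A sketch of how such a check could look, reusing the condition reported in the skip output; the failure message text is an assumption:

- name: Check error
  ansible.builtin.fail:
    msg: Unexpected error message from image pull   # message text is an assumption
  when: not expected_msg in ansible_failed_result.results[0].msg | d("")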

TASK [Create a local tmpdir] ***************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:110
Saturday 14 December 2024  11:32:09 -0500 (0:00:00.067)       0:02:05.133 ***** 
changed: [managed-node1 -> localhost] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/tmp/lsr_sine48u0_podman",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [Run the role with credentials in spec and CA cert] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:118
Saturday 14 December 2024  11:32:09 -0500 (0:00:00.488)       0:02:05.621 ***** 
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.230)       0:02:05.851 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.109)       0:02:05.961 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.075)       0:02:06.037 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.052)       0:02:06.089 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.129)       0:02:06.219 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.062)       0:02:06.282 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.053)       0:02:06.335 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
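
The loop above loads platform-specific vars files, skipping candidates that do not resolve to an existing file (CentOS.yml here). A rough sketch of this pattern; the loop variable, condition, and file names come from the output above, while the hard-coded candidate list and path layout are assumptions (the role most likely derives the list from ansible_facts):

- name: Set platform/version specific variables
  ansible.builtin.include_vars: "{{ __vars_file }}"
  loop:
    - RedHat.yml
    - CentOS.yml
    - CentOS_10.yml
  vars:
    __vars_file: "{{ role_path }}/vars/{{ item }}"   # path layout is an assumption
  when: __vars_file is file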

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:32:10 -0500 (0:00:00.141)       0:02:06.477 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.967)       0:02:07.445 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.062)       0:02:07.508 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.069)       0:02:07.578 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.055)       0:02:07.633 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.055)       0:02:07.688 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:32:11 -0500 (0:00:00.055)       0:02:07.744 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.026012",
    "end": "2024-12-14 11:32:12.362056",
    "rc": 0,
    "start": "2024-12-14 11:32:12.336044"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:32:12-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.454)       0:02:08.199 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.043)       0:02:08.242 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.048)       0:02:08.291 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}
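
Both version gates above are skipped because podman 5.3.1 satisfies the minimums. A sketch of the pattern, reusing the conditions reported in the skip output; the failure messages are assumptions:

- name: Podman package version must be 4.2 or later
  ansible.builtin.fail:
    msg: podman 4.2 or later is required   # message text is an assumption
  when: podman_version is version("4.2", "<")

- name: Podman package version must be 4.4 or later for quadlet, secrets
  ansible.builtin.fail:
    msg: podman 4.4 or later is required for quadlet and secrets   # message text is an assumption
  when: podman_version is version("4.4", "<")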

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.096)       0:02:08.388 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.081)       0:02:08.469 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:32:12 -0500 (0:00:00.259)       0:02:08.729 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:32:13 -0500 (0:00:00.358)       0:02:09.087 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:13 -0500 (0:00:00.154)       0:02:09.241 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:13 -0500 (0:00:00.058)       0:02:09.300 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:13 -0500 (0:00:00.058)       0:02:09.359 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:13 -0500 (0:00:00.109)       0:02:09.468 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.429)       0:02:09.898 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.042)       0:02:09.941 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.051)       0:02:09.992 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.051)       0:02:10.044 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.038)       0:02:10.082 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.034)       0:02:10.117 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.038)       0:02:10.156 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.035)       0:02:10.192 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.037)       0:02:10.229 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.051)       0:02:10.281 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.071)       0:02:10.352 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.037)       0:02:10.390 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.032)       0:02:10.422 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.061)       0:02:10.483 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.163)       0:02:10.646 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 14 December 2024  11:32:14 -0500 (0:00:00.041)       0:02:10.688 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.088)       0:02:10.776 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.041)       0:02:10.818 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.032)       0:02:10.851 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.061)       0:02:10.912 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.032)       0:02:10.945 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.032)       0:02:10.977 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.041)       0:02:11.019 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.084)       0:02:11.103 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_firewall | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.055)       0:02:11.158 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.054)       0:02:11.212 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.062)       0:02:11.274 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Check given registry_host] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:3
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.121)       0:02:11.396 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_cert_spec_item[\"registry_host\"] is search(\"/\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:10
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.050)       0:02:11.446 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:14
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.061)       0:02:11.507 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:18
Saturday 14 December 2024  11:32:15 -0500 (0:00:00.106)       0:02:11.614 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.197)       0:02:11.811 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.061)       0:02:11.873 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.062)       0:02:11.935 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.091)       0:02:12.027 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.496)       0:02:12.523 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.052)       0:02:12.576 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.079)       0:02:12.655 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:16 -0500 (0:00:00.100)       0:02:12.756 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.121)       0:02:12.877 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.085)       0:02:12.963 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.057)       0:02:13.020 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.046)       0:02:13.067 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:23
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.045)       0:02:13.113 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user_home_dir": "/root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:27
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.043)       0:02:13.156 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_certs_d_path": "/etc/containers/certs.d/localhost:5000"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:31
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.036)       0:02:13.193 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure certs.d directory] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:50
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.045)       0:02:13.238 ***** 
changed: [managed-node1] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0700",
    "owner": "root",
    "path": "/etc/containers/certs.d/localhost:5000",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}
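
This is the first change reported in this run: the role creates a per-registry certs.d directory for localhost:5000 to hold the CA cert and client credentials. A sketch consistent with the result above; the path, mode, owner, and group match the reported values, while the exact task wording is an assumption:

- name: Ensure certs.d directory
  ansible.builtin.file:
    path: /etc/containers/certs.d/localhost:5000
    state: directory
    mode: "0700"
    owner: root
    group: root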

TASK [fedora.linux_system_roles.podman : Ensure certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:58
Saturday 14 December 2024  11:32:17 -0500 (0:00:00.407)       0:02:13.646 ***** 
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
changed: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Remove certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:75
Saturday 14 December 2024  11:32:18 -0500 (0:00:00.833)       0:02:14.480 ***** 
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Find files in certs.d directory] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:82
Saturday 14 December 2024  11:32:18 -0500 (0:00:00.063)       0:02:14.544 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the certs.d directory is absent if empty] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:90
Saturday 14 December 2024  11:32:18 -0500 (0:00:00.041)       0:02:14.585 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 14 December 2024  11:32:18 -0500 (0:00:00.059)       0:02:14.645 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 14 December 2024  11:32:18 -0500 (0:00:00.054)       0:02:14.699 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.113)       0:02:14.813 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.252)       0:02:15.065 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_spec": {
            "state": "started"
        },
        "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n    labels:\n        app: test\n        io.containers.autoupdate: registry\n    name: auth_test_1_kube\nspec:\n    containers:\n    -   image: localhost:5000/libpod/testimage:20210610\n        name: auth_test_1_kube\n"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.111)       0:02:15.177 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_kube": {
            "apiVersion": "v1",
            "kind": "Pod",
            "metadata": {
                "labels": {
                    "app": "test",
                    "io.containers.autoupdate": "registry"
                },
                "name": "auth_test_1_kube"
            },
            "spec": {
                "containers": [
                    {
                        "image": "localhost:5000/libpod/testimage:20210610",
                        "name": "auth_test_1_kube"
                    }
                ]
            }
        },
        "__podman_kube_file": "",
        "__podman_pull_image": true,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.074)       0:02:15.252 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_name": "auth_test_1_kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.064)       0:02:15.316 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.116)       0:02:15.432 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.060)       0:02:15.493 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.062)       0:02:15.555 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}
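
Note: the role derives __podman_group from the gathered getent_passwd facts, and since __podman_user is root the primary GID resolves to "0". A hedged shell sketch of the same lookup (not the role's actual implementation):

# primary GID of root from the passwd database -> 0, which becomes __podman_group
getent passwd root | cut -d: -f4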

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:19 -0500 (0:00:00.091)       0:02:15.647 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.435)       0:02:16.082 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.037)       0:02:16.119 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.050)       0:02:16.170 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.052)       0:02:16.222 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.055)       0:02:16.278 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.054)       0:02:16.332 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.054)       0:02:16.387 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.060)       0:02:16.447 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}
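
Note: every subuid/subgid check above is skipped because __podman_user is root ("0"). For a rootless user the role would query the ranges with getsubids instead; a hedged manual equivalent, using a purely hypothetical user name:

# list the /etc/subuid and /etc/subgid ranges for a (hypothetical) rootless user
getsubids someuser
getsubids -g someuser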

TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.052)       0:02:16.499 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_kube",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.071)       0:02:16.571 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60
Saturday 14 December 2024  11:32:20 -0500 (0:00:00.079)       0:02:16.651 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_path": "/etc/containers/ansible-kubernetes.d"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.127)       0:02:16.779 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.076)       0:02:16.855 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.069)       0:02:16.925 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "systemd-escape",
        "--template",
        "podman-kube@.service",
        "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    ],
    "delta": "0:00:00.005199",
    "end": "2024-12-14 11:32:21.522896",
    "rc": 0,
    "start": "2024-12-14 11:32:21.517697"
}

STDOUT:

podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service
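
Note: the service name above is produced by systemd's template escaping of the kube file path. It can be reproduced by hand, and (a sketch only; the role manages the unit through its own tasks, assumed here to map to plain systemctl in system scope) the resulting unit could later be inspected with systemctl:

# reproduce the escaped instance name shown in STDOUT above
systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
# quote the unit name so the \x2d escape reaches systemd literally
systemctl status 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service'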

TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.455)       0:02:17.381 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update containers and services] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.054)       0:02:17.435 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.115)       0:02:17.551 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.102)       0:02:17.654 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 14 December 2024  11:32:21 -0500 (0:00:00.059)       0:02:17.713 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 14 December 2024  11:32:22 -0500 (0:00:00.071)       0:02:17.784 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the host mount volumes] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7
Saturday 14 December 2024  11:32:22 -0500 (0:00:00.053)       0:02:17.838 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18
Saturday 14 December 2024  11:32:22 -0500 (0:00:00.049)       0:02:17.888 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_create_host_directories | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29
Saturday 14 December 2024  11:32:22 -0500 (0:00:00.052)       0:02:17.940 ***** 
changed: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
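
Note: the pull above is censored by no_log, but the target image comes from the kube spec (localhost:5000/libpod/testimage:20210610). A hedged manual equivalent against the test registry, with the credential and certificate options shown purely for illustration (the real values are vaulted):

# illustrative only: username/password are placeholders, not the vaulted test credentials
podman pull --creds testuser:testpassword --tls-verify=true --cert-dir /etc/containers/certs.d/localhost:5000 localhost:5000/libpod/testimage:20210610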

TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53
Saturday 14 December 2024  11:32:22 -0500 (0:00:00.782)       0:02:18.723 ***** 
ok: [managed-node1] => {
    "changed": false,
    "failed_when_result": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61
Saturday 14 December 2024  11:32:23 -0500 (0:00:00.417)       0:02:19.141 ***** 
changed: [managed-node1] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/ansible-kubernetes.d",
    "secontext": "unconfined_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70
Saturday 14 December 2024  11:32:23 -0500 (0:00:00.395)       0:02:19.537 ***** 
changed: [managed-node1] => {
    "changed": true,
    "checksum": "fb0097683a2e5c8909a8037d64ddc1b350aed0be",
    "dest": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml",
    "gid": 0,
    "group": "root",
    "md5sum": "8db65ab83723a021adc90b4f543fdca0",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 244,
    "src": "/root/.ansible/tmp/ansible-tmp-1734193943.8234994-10632-256821299067559/.source.yml",
    "state": "file",
    "uid": 0
}
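
Note: the file deployed above is the rendered __podman_kube_str from "Set per-container variables part 0". Writing it by hand would look like this, with the YAML body taken verbatim from that task result:

cat > /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml <<'EOF'
apiVersion: v1
kind: Pod
metadata:
    labels:
        app: test
        io.containers.autoupdate: registry
    name: auth_test_1_kube
spec:
    containers:
    -   image: localhost:5000/libpod/testimage:20210610
        name: auth_test_1_kube
EOF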

TASK [fedora.linux_system_roles.podman : Update containers/pods] ***************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80
Saturday 14 December 2024  11:32:24 -0500 (0:00:00.758)       0:02:20.295 ***** 
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
fatal: [managed-node1]: FAILED! => {
    "changed": false
}

MSG:

Output: 
Error=time="2024-12-14T11:32:25-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""
Error: building local pause image: finding pause binary: exec: "catatonit": executable file not found in $PATH
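
Note: the "Update containers/pods" failure above is podman being unable to build its local pause image because the catatonit binary is not found in $PATH; the storage.conf key-decoding warning that precedes it appears to be unrelated noise. A hedged remediation sketch (package name as used by Fedora/RHEL packaging; verify for the target distribution):

# install the pause-container helper podman is looking for, then retry the kube play step
dnf -y install catatonit
podman kube play /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml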


TASK [Dump journal] ************************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:199
Saturday 14 December 2024  11:32:25 -0500 (0:00:00.866)       0:02:21.162 ***** 
fatal: [managed-node1]: FAILED! => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.029099",
    "end": "2024-12-14 11:32:25.769347",
    "failed_when_result": true,
    "rc": 0,
    "start": "2024-12-14 11:32:25.740248"
}

STDOUT:

Dec 14 11:26:27 localhost systemd[1]: Stopped dracut-pre-mount.service - dracut pre-mount hook.
░░ Subject: A stop job for unit dracut-pre-mount.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit dracut-pre-mount.service has finished.
░░ 
░░ The job identifier is 118 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-sysctl.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-sysctl.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-sysctl.service - Apply Kernel Variables.
░░ Subject: A stop job for unit systemd-sysctl.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-sysctl.service has finished.
░░ 
░░ The job identifier is 103 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-tmpfiles-setup.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-tmpfiles-setup.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-tmpfiles-setup.service - Create System Files and Directories.
░░ Subject: A stop job for unit systemd-tmpfiles-setup.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-tmpfiles-setup.service has finished.
░░ 
░░ The job identifier is 94 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: Stopped target local-fs.target - Local File Systems.
░░ Subject: A stop job for unit local-fs.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit local-fs.target has finished.
░░ 
░░ The job identifier is 117 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: Stopped target local-fs-pre.target - Preparation for Local File Systems.
░░ Subject: A stop job for unit local-fs-pre.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit local-fs-pre.target has finished.
░░ 
░░ The job identifier is 107 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-udev-trigger.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-udev-trigger.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-udev-trigger.service - Coldplug All udev Devices.
░░ Subject: A stop job for unit systemd-udev-trigger.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-udev-trigger.service has finished.
░░ 
░░ The job identifier is 74 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: Stopping systemd-udevd.service - Rule-based Manager for Device Events and Files...
░░ Subject: A stop job for unit systemd-udevd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-udevd.service has begun execution.
░░ 
░░ The job identifier is 75.
Dec 14 11:26:27 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup.
░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-vconsole-setup.service has finished.
░░ 
░░ The job identifier is 109 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit initrd-cleanup.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Finished initrd-cleanup.service - Cleaning Up and Shutting Down Daemons.
░░ Subject: A start job for unit initrd-cleanup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-cleanup.service has finished successfully.
░░ 
░░ The job identifier is 64.
Dec 14 11:26:27 localhost systemd[1]: systemd-udevd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-udevd.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-udevd.service - Rule-based Manager for Device Events and Files.
░░ Subject: A stop job for unit systemd-udevd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-udevd.service has finished.
░░ 
░░ The job identifier is 75 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-udevd-control.socket: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-udevd-control.socket has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Closed systemd-udevd-control.socket - udev Control Socket.
░░ Subject: A stop job for unit systemd-udevd-control.socket has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-udevd-control.socket has finished.
░░ 
░░ The job identifier is 71 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-udevd-kernel.socket: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-udevd-kernel.socket has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Closed systemd-udevd-kernel.socket - udev Kernel Socket.
░░ Subject: A stop job for unit systemd-udevd-kernel.socket has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-udevd-kernel.socket has finished.
░░ 
░░ The job identifier is 73 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit dracut-pre-udev.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped dracut-pre-udev.service - dracut pre-udev hook.
░░ Subject: A stop job for unit dracut-pre-udev.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit dracut-pre-udev.service has finished.
░░ 
░░ The job identifier is 105 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit dracut-cmdline.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped dracut-cmdline.service - dracut cmdline hook.
░░ Subject: A stop job for unit dracut-cmdline.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit dracut-cmdline.service has finished.
░░ 
░░ The job identifier is 113 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: Starting initrd-udevadm-cleanup-db.service - Cleanup udev Database...
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-udevadm-cleanup-db.service has begun execution.
░░ 
░░ The job identifier is 68.
Dec 14 11:26:27 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev.
░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-tmpfiles-setup-dev.service has finished.
░░ 
░░ The job identifier is 95 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-sysusers.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-sysusers.service - Create System Users.
░░ Subject: A stop job for unit systemd-sysusers.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-sysusers.service has finished.
░░ 
░░ The job identifier is 97 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-tmpfiles-setup-dev-early.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-tmpfiles-setup-dev-early.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully.
░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished.
░░ 
░░ The job identifier is 93 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit kmod-static-nodes.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped kmod-static-nodes.service - Create List of Static Device Nodes.
░░ Subject: A stop job for unit kmod-static-nodes.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit kmod-static-nodes.service has finished.
░░ 
░░ The job identifier is 110 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Finished initrd-udevadm-cleanup-db.service - Cleanup udev Database.
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully.
░░ 
░░ The job identifier is 68.
Dec 14 11:26:27 localhost systemd[1]: Reached target initrd-switch-root.target - Switch Root.
░░ Subject: A start job for unit initrd-switch-root.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-switch-root.target has finished successfully.
░░ 
░░ The job identifier is 67.
Dec 14 11:26:27 localhost systemd[1]: Starting initrd-switch-root.service - Switch Root...
░░ Subject: A start job for unit initrd-switch-root.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-switch-root.service has begun execution.
░░ 
░░ The job identifier is 82.
Dec 14 11:26:27 localhost systemd[1]: Switching root.
Dec 14 11:26:27 localhost systemd-journald[259]: Journal stopped
░░ Subject: The journal has been stopped
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The system journal process has shut down and closed all currently
░░ active journal files.
Dec 14 11:26:32 localhost systemd-journald[259]: Received SIGTERM from PID 1 (systemd).
Dec 14 11:26:32 localhost kernel: audit: type=1404 audit(1734193588.634:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability open_perms=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:26:32 localhost kernel: audit: type=1403 audit(1734193588.797:3): auid=4294967295 ses=4294967295 lsm=selinux res=1
Dec 14 11:26:32 localhost systemd[1]: Successfully loaded SELinux policy in 201.101ms.
Dec 14 11:26:32 localhost systemd[1]: Relabeled /dev/, /dev/shm/, /run/ in 17.621ms.
Dec 14 11:26:32 localhost systemd[1]: systemd 256-16.el10 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP -GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBCRYPTSETUP_PLUGINS +LIBFDISK +PCRE2 +PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT +LIBARCHIVE)
Dec 14 11:26:32 localhost systemd[1]: Detected virtualization xen.
Dec 14 11:26:32 localhost systemd[1]: Detected architecture x86-64.
Dec 14 11:26:32 localhost systemd[1]: Initializing machine ID from VM UUID.
Dec 14 11:26:32 localhost systemd[1]: Installed transient /etc/machine-id file.
Dec 14 11:26:32 localhost systemd[1]: bpf-restrict-fs: LSM BPF program attached
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2djournald.service.mount: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Stopped initrd-switch-root.service - Switch Root.
Dec 14 11:26:32 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-getty.slice - Slice /system/getty.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-serial\x2dgetty.slice - Slice /system/serial-getty.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-sshd\x2dkeygen.slice - Slice /system/sshd-keygen.
Dec 14 11:26:32 localhost systemd[1]: Created slice user.slice - User and Session Slice.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch.
Dec 14 11:26:32 localhost systemd[1]: Set up automount proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point.
Dec 14 11:26:32 localhost systemd[1]: Expecting device dev-ttyS0.device - /dev/ttyS0...
Dec 14 11:26:32 localhost systemd[1]: Reached target cryptsetup.target - Local Encrypted Volumes.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-switch-root.target - Switch Root.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-fs.target - Initrd File Systems.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-root-fs.target - Initrd Root File System.
Dec 14 11:26:32 localhost systemd[1]: Reached target integritysetup.target - Local Integrity Protected Volumes.
Dec 14 11:26:32 localhost systemd[1]: Reached target paths.target - Path Units.
Dec 14 11:26:32 localhost systemd[1]: Reached target slices.target - Slice Units.
Dec 14 11:26:32 localhost systemd[1]: Reached target swap.target - Swaps.
Dec 14 11:26:32 localhost systemd[1]: Reached target veritysetup.target - Local Verity Protected Volumes.
Dec 14 11:26:32 localhost systemd[1]: Listening on dm-event.socket - Device-mapper event daemon FIFOs.
Dec 14 11:26:32 localhost systemd[1]: Listening on lvm2-lvmpolld.socket - LVM2 poll daemon socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on rpcbind.socket - RPCbind Server Activation Socket.
Dec 14 11:26:32 localhost systemd[1]: Reached target rpcbind.target - RPC Port Mapper.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-coredump.socket - Process Core Dump Socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-creds.socket - Credential Encryption/Decryption.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-initctl.socket - initctl Compatibility Named Pipe.
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrextend.socket - TPM PCR Measurements was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrlock.socket - Make TPM PCR Policy was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-udevd-control.socket - udev Control Socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-udevd-kernel.socket - udev Kernel Socket.
Dec 14 11:26:32 localhost systemd[1]: Mounting dev-hugepages.mount - Huge Pages File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting dev-mqueue.mount - POSIX Message Queue File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-kernel-debug.mount - Kernel Debug File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-kernel-tracing.mount - Kernel Trace File System...
Dec 14 11:26:32 localhost systemd[1]: auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
Dec 14 11:26:32 localhost systemd[1]: fips-crypto-policy-overlay.service - Bind-mount FIPS crypto-policy in FIPS mode was skipped because of an unmet condition check (ConditionKernelCommandLine=fips=1).
Dec 14 11:26:32 localhost systemd[1]: Starting kmod-static-nodes.service - Create List of Static Device Nodes...
Dec 14 11:26:32 localhost systemd[1]: Starting lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@dm_mod.service - Load Kernel Module dm_mod...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@efi_pstore.service - Load Kernel Module efi_pstore...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@fuse.service - Load Kernel Module fuse...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@loop.service - Load Kernel Module loop...
Dec 14 11:26:32 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Stopped systemd-fsck-root.service - File System Check on Root Device.
Dec 14 11:26:32 localhost systemd[1]: systemd-hibernate-clear.service - Clear Stale Hibernate Storage Info was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/HibernateLocation-8cf2644b-4b0b-428f-9387-6d876050dc67).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-journald.service - Journal Service...
Dec 14 11:26:32 localhost kernel: loop: module loaded
Dec 14 11:26:32 localhost kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log.
Dec 14 11:26:32 localhost kernel: device-mapper: uevent: version 1.0.3
Dec 14 11:26:32 localhost systemd[1]: systemd-modules-load.service - Load Kernel Modules was skipped because no trigger condition checks were met.
Dec 14 11:26:32 localhost kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@lists.linux.dev
Dec 14 11:26:32 localhost kernel: fuse: init (API version 7.41)
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-network-generator.service - Generate network units from Kernel command line...
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrmachine.service - TPM PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-remount-fs.service - Remount Root and Kernel File Systems...
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-sysctl.service - Apply Kernel Variables...
Dec 14 11:26:32 localhost systemd[1]: systemd-tpm2-setup-early.service - Early TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udev-load-credentials.service - Load udev Rules from Credentials...
Dec 14 11:26:32 localhost systemd-journald[523]: Collecting audit messages is disabled.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udev-trigger.service - Coldplug All udev Devices...
Dec 14 11:26:32 localhost systemd[1]: Mounted dev-hugepages.mount - Huge Pages File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted dev-mqueue.mount - POSIX Message Queue File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-kernel-debug.mount - Kernel Debug File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-kernel-tracing.mount - Kernel Trace File System.
Dec 14 11:26:32 localhost systemd[1]: Finished kmod-static-nodes.service - Create List of Static Device Nodes.
Dec 14 11:26:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs.
Dec 14 11:26:32 localhost systemd[1]: modprobe@dm_mod.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@dm_mod.service - Load Kernel Module dm_mod.
Dec 14 11:26:32 localhost systemd[1]: modprobe@drm.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm.
Dec 14 11:26:32 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@efi_pstore.service - Load Kernel Module efi_pstore.
Dec 14 11:26:32 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse.
Dec 14 11:26:32 localhost systemd[1]: modprobe@loop.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@loop.service - Load Kernel Module loop.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-network-generator.service - Generate network units from Kernel command line.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-remount-fs.service - Remount Root and Kernel File Systems.
Dec 14 11:26:32 localhost systemd-journald[523]: Journal started
░░ Subject: The journal has been started
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The system journal process has started up, opened the journal
░░ files for writing and is now ready to process requests.
Dec 14 11:26:32 localhost systemd-journald[523]: Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is 8M, max 70.5M, 62.5M free.
░░ Subject: Disk space used by the journal
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is currently using 8M.
░░ Maximum allowed usage is set to 70.5M.
░░ Leaving at least 35.2M free (of currently available 689.3M of disk space).
░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available.
░░ 
░░ The limits controlling how much disk space is used by the journal may
░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=,
░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in
░░ /etc/systemd/journald.conf. See journald.conf(5) for details.
Dec 14 11:26:31 localhost systemd[1]: Queued start job for default target multi-user.target.
Dec 14 11:26:31 localhost systemd[1]: systemd-journald.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-journald.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-journald.service - Journal Service.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-sysctl.service - Apply Kernel Variables.
░░ Subject: A start job for unit systemd-sysctl.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysctl.service has finished successfully.
░░ 
░░ The job identifier is 181.
Dec 14 11:26:32 localhost systemd[1]: systemd-hwdb-update.service - Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc).
░░ Subject: A start job for unit systemd-hwdb-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hwdb-update.service has finished successfully.
░░ 
░░ The job identifier is 177.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-journal-flush.service - Flush Journal to Persistent Storage...
░░ Subject: A start job for unit systemd-journal-flush.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-flush.service has begun execution.
░░ 
░░ The job identifier is 152.
Dec 14 11:26:32 localhost systemd[1]: systemd-pstore.service - Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore).
░░ Subject: A start job for unit systemd-pstore.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pstore.service has finished successfully.
░░ 
░░ The job identifier is 147.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-random-seed.service - Load/Save OS Random Seed...
░░ Subject: A start job for unit systemd-random-seed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-random-seed.service has begun execution.
░░ 
░░ The job identifier is 137.
Dec 14 11:26:32 localhost systemd[1]: systemd-repart.service - Repartition Root Disk was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-repart.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-repart.service has finished successfully.
░░ 
░░ The job identifier is 160.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully...
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has begun execution.
░░ 
░░ The job identifier is 198.
Dec 14 11:26:32 localhost systemd[1]: systemd-tpm2-setup.service - TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-tpm2-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tpm2-setup.service has finished successfully.
░░ 
░░ The job identifier is 151.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-udev-load-credentials.service - Load udev Rules from Credentials.
░░ Subject: A start job for unit systemd-udev-load-credentials.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udev-load-credentials.service has finished successfully.
░░ 
░░ The job identifier is 173.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-random-seed.service - Load/Save OS Random Seed.
░░ Subject: A start job for unit systemd-random-seed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-random-seed.service has finished successfully.
░░ 
░░ The job identifier is 137.
Dec 14 11:26:32 localhost systemd[1]: Finished lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling.
░░ Subject: A start job for unit lvm2-monitor.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit lvm2-monitor.service has finished successfully.
░░ 
░░ The job identifier is 186.
Dec 14 11:26:32 localhost systemd-journald[523]: Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is 8M, max 70.5M, 62.5M free.
░░ Subject: Disk space used by the journal
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is currently using 8M.
░░ Maximum allowed usage is set to 70.5M.
░░ Leaving at least 35.2M free (of currently available 689.3M of disk space).
░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available.
░░ 
░░ The limits controlling how much disk space is used by the journal may
░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=,
░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in
░░ /etc/systemd/journald.conf. See journald.conf(5) for details.
Dec 14 11:26:32 localhost systemd-journald[523]: Received client request to flush runtime journal.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-journal-flush.service - Flush Journal to Persistent Storage.
░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-flush.service has finished successfully.
░░ 
░░ The job identifier is 152.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-udev-trigger.service - Coldplug All udev Devices.
░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udev-trigger.service has finished successfully.
░░ 
░░ The job identifier is 185.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully.
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully.
░░ 
░░ The job identifier is 198.
Dec 14 11:26:32 localhost systemd[1]: systemd-sysusers.service - Create System Users was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-sysusers.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysusers.service has finished successfully.
░░ 
░░ The job identifier is 182.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev...
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution.
░░ 
░░ The job identifier is 145.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev.
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully.
░░ 
░░ The job identifier is 145.
Dec 14 11:26:32 localhost systemd[1]: Reached target local-fs-pre.target - Preparation for Local File Systems.
░░ Subject: A start job for unit local-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit local-fs-pre.target has finished successfully.
░░ 
░░ The job identifier is 144.
Dec 14 11:26:32 localhost systemd[1]: Reached target local-fs.target - Local File Systems.
░░ Subject: A start job for unit local-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit local-fs.target has finished successfully.
░░ 
░░ The job identifier is 142.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-bootctl.socket - Boot Entries Service Socket.
░░ Subject: A start job for unit systemd-bootctl.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-bootctl.socket has finished successfully.
░░ 
░░ The job identifier is 213.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-sysext.socket - System Extension Image Management.
░░ Subject: A start job for unit systemd-sysext.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysext.socket has finished successfully.
░░ 
░░ The job identifier is 220.
Dec 14 11:26:32 localhost systemd[1]: ldconfig.service - Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit ldconfig.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ldconfig.service has finished successfully.
░░ 
░░ The job identifier is 146.
Dec 14 11:26:32 localhost systemd[1]: selinux-autorelabel-mark.service - Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux).
░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit selinux-autorelabel-mark.service has finished successfully.
░░ 
░░ The job identifier is 190.
Dec 14 11:26:32 localhost systemd[1]: systemd-binfmt.service - Set Up Additional Binary Formats was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-binfmt.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-binfmt.service has finished successfully.
░░ 
░░ The job identifier is 193.
Dec 14 11:26:32 localhost systemd[1]: systemd-boot-random-seed.service - Update Boot Loader Random Seed was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-boot-random-seed.service has finished successfully.
░░ 
░░ The job identifier is 179.
Dec 14 11:26:32 localhost systemd[1]: systemd-confext.service - Merge System Configuration Images into /etc/ was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-confext.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-confext.service has finished successfully.
░░ 
░░ The job identifier is 157.
Dec 14 11:26:32 localhost systemd[1]: systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/ was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-sysext.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysext.service has finished successfully.
░░ 
░░ The job identifier is 189.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup.service - Create System Files and Directories...
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup.service has begun execution.
░░ 
░░ The job identifier is 139.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udevd.service - Rule-based Manager for Device Events and Files...
░░ Subject: A start job for unit systemd-udevd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udevd.service has begun execution.
░░ 
░░ The job identifier is 172.
Dec 14 11:26:32 localhost systemd-udevd[562]: Using default interface naming scheme 'rhel-10.0-beta'.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-udevd.service - Rule-based Manager for Device Events and Files.
░░ Subject: A start job for unit systemd-udevd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udevd.service has finished successfully.
░░ 
░░ The job identifier is 172.
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@fuse.service - Load Kernel Module fuse...
░░ Subject: A start job for unit modprobe@fuse.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@fuse.service has begun execution.
░░ 
░░ The job identifier is 294.
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs...
░░ Subject: A start job for unit modprobe@configfs.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@configfs.service has begun execution.
░░ 
░░ The job identifier is 302.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup.service - Create System Files and Directories.
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully.
░░ 
░░ The job identifier is 139.
Dec 14 11:26:32 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit modprobe@fuse.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse.
░░ Subject: A start job for unit modprobe@fuse.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@fuse.service has finished successfully.
░░ 
░░ The job identifier is 294.
Dec 14 11:26:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit modprobe@configfs.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs.
░░ Subject: A start job for unit modprobe@configfs.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@configfs.service has finished successfully.
░░ 
░░ The job identifier is 302.
Dec 14 11:26:32 localhost systemd[1]: Starting audit-rules.service - Load Audit Rules...
░░ Subject: A start job for unit audit-rules.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit audit-rules.service has begun execution.
░░ 
░░ The job identifier is 236.
Dec 14 11:26:32 localhost systemd[1]: Starting rpcbind.service - RPC Bind...
░░ Subject: A start job for unit rpcbind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpcbind.service has begun execution.
░░ 
░░ The job identifier is 253.
Dec 14 11:26:32 localhost systemd[1]: systemd-firstboot.service - First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes).
░░ Subject: A start job for unit systemd-firstboot.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-firstboot.service has finished successfully.
░░ 
░░ The job identifier is 180.
Dec 14 11:26:32 localhost systemd[1]: first-boot-complete.target - First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes).
░░ Subject: A start job for unit first-boot-complete.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit first-boot-complete.target has finished successfully.
░░ 
░░ The job identifier is 138.
Dec 14 11:26:32 localhost systemd[1]: systemd-journal-catalog-update.service - Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var).
░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-catalog-update.service has finished successfully.
░░ 
░░ The job identifier is 131.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-machine-id-commit.service - Save Transient machine-id to Disk...
░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-machine-id-commit.service has begun execution.
░░ 
░░ The job identifier is 194.
Dec 14 11:26:32 localhost systemd[1]: systemd-update-done.service - Update is Completed was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-update-done.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-done.service has finished successfully.
░░ 
░░ The job identifier is 168.
Dec 14 11:26:32 localhost systemd[1]: Condition check resulted in dev-ttyS0.device - /dev/ttyS0 being skipped.
░░ Subject: A start job for unit dev-ttyS0.device has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dev-ttyS0.device has finished successfully.
░░ 
░░ The job identifier is 232.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-machine-id-commit.service - Save Transient machine-id to Disk.
░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-machine-id-commit.service has finished successfully.
░░ 
░░ The job identifier is 194.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dsysctl.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-fs-fuse-connections.mount - FUSE Control File System...
░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sys-fs-fuse-connections.mount has begun execution.
░░ 
░░ The job identifier is 166.
Dec 14 11:26:32 localhost systemd[1]: Mounting var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System...
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution.
░░ 
░░ The job identifier is 247.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-fs-fuse-connections.mount - FUSE Control File System.
░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully.
░░ 
░░ The job identifier is 166.
Dec 14 11:26:32 localhost (udev-worker)[574]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:26:32 localhost kernel: RPC: Registered named UNIX socket transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered udp transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp-with-tls transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module.
Dec 14 11:26:32 localhost systemd[1]: Mounted var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System.
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully.
░░ 
░░ The job identifier is 247.
Dec 14 11:26:32 localhost systemd[1]: Reached target rpc_pipefs.target.
░░ Subject: A start job for unit rpc_pipefs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc_pipefs.target has finished successfully.
░░ 
░░ The job identifier is 246.
Dec 14 11:26:33 localhost augenrules[584]: /sbin/augenrules: No change
Dec 14 11:26:33 localhost augenrules[615]: No rules
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost systemd[1]: audit-rules.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit audit-rules.service has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost systemd[1]: Finished audit-rules.service - Load Audit Rules.
░░ Subject: A start job for unit audit-rules.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit audit-rules.service has finished successfully.
░░ 
░░ The job identifier is 236.
Dec 14 11:26:33 localhost systemd[1]: Starting auditd.service - Security Audit Logging Service...
░░ Subject: A start job for unit auditd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit auditd.service has begun execution.
░░ 
░░ The job identifier is 235.
Dec 14 11:26:33 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5
Dec 14 11:26:33 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console
Dec 14 11:26:33 localhost kernel: Console: switching to colour dummy device 80x25
Dec 14 11:26:33 localhost kernel: [drm] Initialized cirrus 2.0.0 for 0000:00:02.0 on minor 0
Dec 14 11:26:33 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device
Dec 14 11:26:33 localhost kernel: Console: switching to colour frame buffer device 128x48
Dec 14 11:26:33 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device
Dec 14 11:26:33 localhost systemd[1]: Started auditd.service - Security Audit Logging Service.
░░ Subject: A start job for unit auditd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit auditd.service has finished successfully.
░░ 
░░ The job identifier is 235.
Dec 14 11:26:33 localhost auditd[625]: No plugins found, not dispatching events
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-update-utmp.service - Record System Boot/Shutdown in UTMP...
░░ Subject: A start job for unit systemd-update-utmp.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp.service has begun execution.
░░ 
░░ The job identifier is 258.
Dec 14 11:26:33 localhost auditd[625]: Init complete, auditd 4.0 listening for events (startup state enable)
Dec 14 11:26:33 localhost systemd[1]: Started rpcbind.service - RPC Bind.
░░ Subject: A start job for unit rpcbind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpcbind.service has finished successfully.
░░ 
░░ The job identifier is 253.
Dec 14 11:26:33 localhost systemd[1]: Finished systemd-update-utmp.service - Record System Boot/Shutdown in UTMP.
░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp.service has finished successfully.
░░ 
░░ The job identifier is 258.
Dec 14 11:26:33 localhost systemd[1]: Reached target sysinit.target - System Initialization.
░░ Subject: A start job for unit sysinit.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sysinit.target has finished successfully.
░░ 
░░ The job identifier is 125.
Dec 14 11:26:33 localhost systemd[1]: Started dnf-makecache.timer - dnf makecache --timer.
░░ Subject: A start job for unit dnf-makecache.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dnf-makecache.timer has finished successfully.
░░ 
░░ The job identifier is 202.
Dec 14 11:26:33 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer
Dec 14 11:26:33 localhost systemd[1]: Started fstrim.timer - Discard unused filesystem blocks once a week.
░░ Subject: A start job for unit fstrim.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.timer has finished successfully.
░░ 
░░ The job identifier is 201.
Dec 14 11:26:33 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr
Dec 14 11:26:33 localhost systemd[1]: Started logrotate.timer - Daily rotation of log files.
░░ Subject: A start job for unit logrotate.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit logrotate.timer has finished successfully.
░░ 
░░ The job identifier is 209.
Dec 14 11:26:33 localhost systemd[1]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories.
░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully.
░░ 
░░ The job identifier is 210.
Dec 14 11:26:33 localhost systemd[1]: Reached target timers.target - Timer Units.
░░ Subject: A start job for unit timers.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit timers.target has finished successfully.
░░ 
░░ The job identifier is 200.
Dec 14 11:26:33 localhost systemd[1]: Listening on dbus.socket - D-Bus System Message Bus Socket.
░░ Subject: A start job for unit dbus.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus.socket has finished successfully.
░░ 
░░ The job identifier is 206.
Dec 14 11:26:33 localhost systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket.
░░ Subject: A start job for unit pcscd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit pcscd.socket has finished successfully.
░░ 
░░ The job identifier is 222.
Dec 14 11:26:33 localhost systemd[1]: Listening on sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket.
░░ Subject: A start job for unit sssd-kcm.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sssd-kcm.socket has finished successfully.
░░ 
░░ The job identifier is 214.
Dec 14 11:26:33 localhost systemd[1]: Listening on systemd-hostnamed.socket - Hostname Service Socket.
░░ Subject: A start job for unit systemd-hostnamed.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.socket has finished successfully.
░░ 
░░ The job identifier is 223.
Dec 14 11:26:33 localhost systemd[1]: Reached target sockets.target - Socket Units.
░░ Subject: A start job for unit sockets.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sockets.target has finished successfully.
░░ 
░░ The job identifier is 211.
Dec 14 11:26:33 localhost systemd[1]: Starting dbus-broker.service - D-Bus System Message Bus...
░░ Subject: A start job for unit dbus-broker.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus-broker.service has begun execution.
░░ 
░░ The job identifier is 207.
Dec 14 11:26:33 localhost systemd[1]: systemd-pcrphase-sysinit.service - TPM PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully.
░░ 
░░ The job identifier is 135.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup...
░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has begun execution.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup.
░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-vconsole-setup.service has finished.
░░ 
░░ The job identifier is 318 and the job result is done.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup...
░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has begun execution.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: Started dbus-broker.service - D-Bus System Message Bus.
░░ Subject: A start job for unit dbus-broker.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus-broker.service has finished successfully.
░░ 
░░ The job identifier is 207.
Dec 14 11:26:33 localhost systemd[1]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit basic.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit basic.target has finished successfully.
░░ 
░░ The job identifier is 122.
Dec 14 11:26:33 localhost dbus-broker-launch[637]: Ready
Dec 14 11:26:33 localhost systemd[1]: Starting chronyd.service - NTP client/server...
░░ Subject: A start job for unit chronyd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit chronyd.service has begun execution.
░░ 
░░ The job identifier is 268.
Dec 14 11:26:33 localhost systemd[1]: Starting cloud-init-local.service - Initial cloud-init job (pre-networking)...
░░ Subject: A start job for unit cloud-init-local.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init-local.service has begun execution.
░░ 
░░ The job identifier is 275.
Dec 14 11:26:33 localhost systemd[1]: Starting dracut-shutdown.service - Restore /run/initramfs on shutdown...
░░ Subject: A start job for unit dracut-shutdown.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dracut-shutdown.service has begun execution.
░░ 
░░ The job identifier is 184.
Dec 14 11:26:33 localhost systemd[1]: Started irqbalance.service - irqbalance daemon.
░░ Subject: A start job for unit irqbalance.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit irqbalance.service has finished successfully.
░░ 
░░ The job identifier is 234.
Dec 14 11:26:33 localhost systemd[1]: Started rngd.service - Hardware RNG Entropy Gatherer Daemon.
░░ Subject: A start job for unit rngd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rngd.service has finished successfully.
░░ 
░░ The job identifier is 259.
Dec 14 11:26:33 localhost systemd[1]: Starting rsyslog.service - System Logging Service...
░░ Subject: A start job for unit rsyslog.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rsyslog.service has begun execution.
░░ 
░░ The job identifier is 272.
Dec 14 11:26:33 localhost systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration).
░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ssh-host-keys-migration.service has finished successfully.
░░ 
░░ The job identifier is 267.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░ 
░░ The job identifier is 263.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░ 
░░ The job identifier is 265.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░ 
░░ The job identifier is 266.
Dec 14 11:26:33 localhost systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen.target has finished successfully.
░░ 
░░ The job identifier is 262.
Dec 14 11:26:33 localhost systemd[1]: sssd.service - System Security Services Daemon was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit sssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sssd.service has finished successfully.
░░ 
░░ The job identifier is 237.
Dec 14 11:26:33 localhost systemd[1]: Reached target nss-user-lookup.target - User and Group Name Lookups.
░░ Subject: A start job for unit nss-user-lookup.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit nss-user-lookup.target has finished successfully.
░░ 
░░ The job identifier is 238.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-logind.service - User Login Management...
░░ Subject: A start job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-logind.service has begun execution.
░░ 
░░ The job identifier is 250.
Dec 14 11:26:33 localhost systemd[1]: Finished dracut-shutdown.service - Restore /run/initramfs on shutdown.
░░ Subject: A start job for unit dracut-shutdown.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dracut-shutdown.service has finished successfully.
░░ 
░░ The job identifier is 184.
Dec 14 11:26:33 localhost (qbalance)[649]: irqbalance.service: Referenced but unset environment variable evaluates to an empty string: IRQBALANCE_ARGS
Dec 14 11:26:33 localhost systemd[1]: Started rsyslog.service - System Logging Service.
░░ Subject: A start job for unit rsyslog.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rsyslog.service has finished successfully.
░░ 
░░ The job identifier is 272.
Dec 14 11:26:33 localhost rsyslogd[651]: imjournal: filecreatemode is not set, using default 0644 [v8.2408.0-2.el10 try https://www.rsyslog.com/e/2186 ]
Dec 14 11:26:33 localhost rsyslogd[651]: [origin software="rsyslogd" swVersion="8.2408.0-2.el10" x-pid="651" x-info="https://www.rsyslog.com"] start
Dec 14 11:26:33 localhost systemd-logind[653]: New seat seat0.
░░ Subject: A new seat seat0 is now available
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new seat seat0 has been configured and is now available.
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event0 (Power Button)
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event1 (Sleep Button)
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard)
Dec 14 11:26:33 localhost systemd[1]: Started systemd-logind.service - User Login Management.
░░ Subject: A start job for unit systemd-logind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-logind.service has finished successfully.
░░ 
░░ The job identifier is 250.
Dec 14 11:26:33 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup.
░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has finished successfully.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: run-credentials-systemd\x2dvconsole\x2dsetup.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dvconsole\x2dsetup.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost rsyslogd[651]: imjournal: journal files changed, reloading...  [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ]
Dec 14 11:26:33 localhost rngd[650]: Disabling 7: PKCS11 Entropy generator (pkcs11)
Dec 14 11:26:33 localhost rngd[650]: Disabling 5: NIST Network Entropy Beacon (nist)
Dec 14 11:26:33 localhost rngd[650]: Disabling 9: Qrypt quantum entropy beacon (qrypt)
Dec 14 11:26:33 localhost rngd[650]: Disabling 10: Named pipe entropy input (namedpipe)
Dec 14 11:26:33 localhost rngd[650]: Initializing available sources
Dec 14 11:26:33 localhost rngd[650]: [hwrng ]: Initialization Failed
Dec 14 11:26:33 localhost rngd[650]: [rdrand]: Enabling RDRAND rng support
Dec 14 11:26:33 localhost rngd[650]: [rdrand]: Initialized
Dec 14 11:26:33 localhost rngd[650]: [jitter]: JITTER timeout set to 5 sec
Dec 14 11:26:33 localhost chronyd[664]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG)
Dec 14 11:26:33 localhost rngd[650]: [jitter]: Initializing AES buffer
Dec 14 11:26:33 localhost chronyd[664]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift
Dec 14 11:26:33 localhost systemd[1]: Started chronyd.service - NTP client/server.
░░ Subject: A start job for unit chronyd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit chronyd.service has finished successfully.
░░ 
░░ The job identifier is 268.
Dec 14 11:26:33 localhost chronyd[664]: Loaded seccomp filter (level 2)
Dec 14 11:26:36 localhost cloud-init[671]: Cloud-init v. 24.1.4-21.el10 running 'init-local' at Sat, 14 Dec 2024 16:26:36 +0000. Up 12.60 seconds.
Dec 14 11:26:36 localhost dhcpcd[673]: dhcpcd-10.0.6 starting
Dec 14 11:26:36 localhost kernel: 8021q: 802.1Q VLAN Support v1.8
Dec 14 11:26:36 localhost systemd[1]: Listening on systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch.
░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-rfkill.socket has finished successfully.
░░ 
░░ The job identifier is 328.
Dec 14 11:26:37 localhost kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database
Dec 14 11:26:37 localhost kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7'
Dec 14 11:26:37 localhost kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600'
Dec 14 11:26:37 localhost dhcpcd[676]: DUID 00:01:00:01:2e:f0:6e:3d:0e:03:6a:4a:4d:55
Dec 14 11:26:37 localhost dhcpcd[676]: eth0: IAID 6a:4a:4d:55
Dec 14 11:26:37 localhost kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2
Dec 14 11:26:37 localhost kernel: cfg80211: failed to load regulatory.db
Dec 14 11:26:38 localhost rngd[650]: [jitter]: Unable to obtain AES key, disabling JITTER source
Dec 14 11:26:38 localhost rngd[650]: [jitter]: Initialization Failed
Dec 14 11:26:38 localhost rngd[650]: Process privileges have been dropped to 2:2
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: soliciting a DHCP lease
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: offered 10.31.43.117 from 10.31.40.1
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: leased 10.31.43.117 for 3600 seconds
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: adding route to 10.31.40.0/22
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: adding default route via 10.31.40.1
Dec 14 11:26:38 localhost dhcpcd[676]: control command: /usr/sbin/dhcpcd --dumplease --ipv4only eth0
Dec 14 11:26:38 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 337.
Dec 14 11:26:38 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 337.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-hostnamed[696]: Hostname set to <ip-10-31-43-117.us-east-1.aws.redhat.com> (static)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking).
░░ Subject: A start job for unit cloud-init-local.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init-local.service has finished successfully.
░░ 
░░ The job identifier is 275.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network-pre.target - Preparation for Network.
░░ Subject: A start job for unit network-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-pre.target has finished successfully.
░░ 
░░ The job identifier is 156.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager.service - Network Manager...
░░ Subject: A start job for unit NetworkManager.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has begun execution.
░░ 
░░ The job identifier is 205.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7758] NetworkManager (version 1.51.4-1.el10) is starting... (boot:38eff4b5-157f-400c-9c9a-01c5bd7302d2)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7760] Read config: /etc/NetworkManager/NetworkManager.conf, /etc/NetworkManager/conf.d/30-cloud-init-ip6-addr-gen-mode.conf
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7889] manager[0x557f01cc5a10]: monitoring kernel firmware directory '/lib/firmware'.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7922] hostname: hostname: using hostnamed
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7922] hostname: static hostname changed from (none) to "ip-10-31-43-117.us-east-1.aws.redhat.com"
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7926] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7930] manager[0x557f01cc5a10]: rfkill: Wi-Fi hardware radio set enabled
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7930] manager[0x557f01cc5a10]: rfkill: WWAN hardware radio set enabled
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7986] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7987] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7987] manager: Networking is enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8012] settings: Loaded settings plugin: keyfile (internal)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 415.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8087] dhcp: init: Using DHCP client 'internal'
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8090] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8101] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8125] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8131] device (lo): Activation: starting connection 'lo' (77f275e6-4c01-4392-ab9b-e140983cfde9)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8137] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8141] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager.service - Network Manager.
░░ Subject: A start job for unit NetworkManager.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has finished successfully.
░░ 
░░ The job identifier is 205.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8174] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network.target - Network.
░░ Subject: A start job for unit network.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network.target has finished successfully.
░░ 
░░ The job identifier is 208.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8196] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8198] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8200] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8201] device (eth0): carrier: link connected
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8203] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8218] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online...
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has begun execution.
░░ 
░░ The job identifier is 204.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8237] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8242] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8245] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8248] manager: NetworkManager state is now CONNECTING
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8252] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8261] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8272] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon...
░░ Subject: A start job for unit gssproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has begun execution.
░░ 
░░ The job identifier is 244.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8299] dhcp4 (eth0): state changed new lease, address=10.31.43.117, acd pending
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon.
░░ Subject: A start job for unit gssproxy.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has finished successfully.
░░ 
░░ The job identifier is 244.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
░░ Subject: A start job for unit rpc-gssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-gssd.service has finished successfully.
░░ 
░░ The job identifier is 245.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target nfs-client.target - NFS client services.
░░ Subject: A start job for unit nfs-client.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit nfs-client.target has finished successfully.
░░ 
░░ The job identifier is 241.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems.
░░ Subject: A start job for unit remote-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs-pre.target has finished successfully.
░░ 
░░ The job identifier is 249.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes.
░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-cryptsetup.target has finished successfully.
░░ 
░░ The job identifier is 260.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs.target - Remote File Systems.
░░ Subject: A start job for unit remote-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs.target has finished successfully.
░░ 
░░ The job identifier is 271.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pcrphase.service has finished successfully.
░░ 
░░ The job identifier is 170.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 415.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9503] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9512] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9539] device (lo): Activation: successful, device activated.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9964] dhcp4 (eth0): state changed new lease, address=10.31.43.117
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9975] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0371] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0438] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0445] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0450] manager: NetworkManager state is now CONNECTED_SITE
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0460] device (eth0): Activation: successful, device activated.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0466] manager: NetworkManager state is now CONNECTED_GLOBAL
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0468] manager: startup complete
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online.
░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has finished successfully.
░░ 
░░ The job identifier is 204.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)...
░░ Subject: A start job for unit cloud-init.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has begun execution.
░░ 
░░ The job identifier is 274.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.11.160.238
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.18.100.10
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.2.32.37
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.2.32.38
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Cloud-init v. 24.1.4-21.el10 running 'init' at Sat, 14 Dec 2024 16:26:39 +0000. Up 15.52 seconds.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Device |  Up  |           Address           |      Mask     | Scope  |     Hw-Address    |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |  eth0  | True |         10.31.43.117        | 255.255.252.0 | global | 0e:03:6a:4a:4d:55 |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |  eth0  | True | fe80::c03:6aff:fe4a:4d55/64 |       .       |  link  | 0e:03:6a:4a:4d:55 |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   lo   | True |          127.0.0.1          |   255.0.0.0   |  host  |         .         |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   lo   | True |           ::1/128           |       .       |  host  |         .         |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Route | Destination |  Gateway   |    Genmask    | Interface | Flags |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   0   |   0.0.0.0   | 10.31.40.1 |    0.0.0.0    |    eth0   |   UG  |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   1   |  10.31.40.0 |  0.0.0.0   | 255.255.252.0 |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   0   |  fe80::/64  |    ::   |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   2   |  multicast  |    ::   |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private rsa key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:4kPeOc6dyUInlbtyxVsYnSTAiNqxtK7A/xVLG9s/emE root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +---[RSA 3072]----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |       . o..     |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |      + . . . .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |     + +   . + . |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |    . +   o . o  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | .   .o S. o o   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  o  +.+oOo E .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   o .+.Oo.+ +   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |    o  =o+o++    |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |     .. o+*o..   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private ecdsa key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:RdoWwMOSgw51mBD28FuLabo0FGe7XvtI9kDaq60uA5s root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +---[ECDSA 256]---+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  =+.+.+..o      |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | ..++.+ ++ .     |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  .o= .o..+      |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   +.* . o       |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  . * o S        |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |.. o =           |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | ++ o *          |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |E.o+ = *         |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  .+=o=.o        |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private ed25519 key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:v1uLzZ9r22pXF2QsO+gHOGERom4ErsGOiGgKtl7LE5E root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +--[ED25519 256]--+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   .   . oo   .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |. . . . .o   . + |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | o ..o  . o . =  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |* oEo    o o o . |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |*=  .o  S o . . .|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |= ...    . . .  o|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |.. ..     . o   o|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |. o..      * .oo.|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | . o.     +.++*=.|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler).
░░ Subject: A start job for unit cloud-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has finished successfully.
░░ 
░░ The job identifier is 274.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability.
░░ Subject: A start job for unit cloud-config.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.target has finished successfully.
░░ 
░░ The job identifier is 277.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online.
░░ Subject: A start job for unit network-online.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-online.target has finished successfully.
░░ 
░░ The job identifier is 203.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config...
░░ Subject: A start job for unit cloud-config.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has begun execution.
░░ 
░░ The job identifier is 276.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming...
░░ Subject: A start job for unit kdump.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has begun execution.
░░ 
░░ The job identifier is 256.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness....
░░ Subject: A start job for unit restraintd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has begun execution.
░░ 
░░ The job identifier is 239.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart...
░░ Subject: A start job for unit rpc-statd-notify.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has begun execution.
░░ 
░░ The job identifier is 242.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 261.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sm-notify[872]: Version 2.7.1 starting
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart.
░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has finished successfully.
░░ 
░░ The job identifier is 242.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com (sshd)[873]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness..
░░ Subject: A start job for unit restraintd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has finished successfully.
░░ 
░░ The job identifier is 239.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Server listening on 0.0.0.0 port 22.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Server listening on :: port 22.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 261.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[916]: Cloud-init v. 24.1.4-21.el10 running 'modules:config' at Sat, 14 Dec 2024 16:26:41 +0000. Up 17.21 seconds.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Received signal 15; terminating.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon...
░░ Subject: A stop job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit sshd.service has successfully entered the 'dead' state.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon.
░░ Subject: A stop job for unit sshd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has finished.
░░ 
░░ The job identifier is 507 and the job result is done.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target.
░░ Subject: A stop job for unit sshd-keygen.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has finished.
░░ 
░░ The job identifier is 591 and the job result is done.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target...
░░ Subject: A stop job for unit sshd-keygen.target has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has begun execution.
░░ 
░░ The job identifier is 591.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration).
░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ssh-host-keys-migration.service has finished successfully.
░░ 
░░ The job identifier is 590.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░ 
░░ The job identifier is 586.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░ 
░░ The job identifier is 588.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░ 
░░ The job identifier is 589.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen.target has finished successfully.
░░ 
░░ The job identifier is 591.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com (sshd)[920]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[920]: Server listening on 0.0.0.0 port 22.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[920]: Server listening on :: port 22.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com restraintd[877]: Listening on http://localhost:8081
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config.
░░ Subject: A start job for unit cloud-config.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has finished successfully.
░░ 
░░ The job identifier is 276.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts...
░░ Subject: A start job for unit cloud-final.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has begun execution.
░░ 
░░ The job identifier is 278.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions...
░░ Subject: A start job for unit systemd-user-sessions.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has begun execution.
░░ 
░░ The job identifier is 240.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions.
░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has finished successfully.
░░ 
░░ The job identifier is 240.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler.
░░ Subject: A start job for unit crond.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit crond.service has finished successfully.
░░ 
░░ The job identifier is 255.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1.
░░ Subject: A start job for unit getty@tty1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty@tty1.service has finished successfully.
░░ 
░░ The job identifier is 227.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0.
░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit serial-getty@ttyS0.service has finished successfully.
░░ 
░░ The job identifier is 231.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts.
░░ Subject: A start job for unit getty.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty.target has finished successfully.
░░ 
░░ The job identifier is 226.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System.
░░ Subject: A start job for unit multi-user.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit multi-user.target has finished successfully.
░░ 
░░ The job identifier is 121.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) STARTUP (1.7.0)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP...
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution.
░░ 
░░ The job identifier is 257.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (Syslog will be used instead of sendmail.)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 98% if used.)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (running with inotify support)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP.
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully.
░░ 
░░ The job identifier is 257.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Detected change(s) in the following file(s):  /etc/fstab
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1020]: Cloud-init v. 24.1.4-21.el10 running 'modules:final' at Sat, 14 Dec 2024 16:26:41 +0000. Up 17.71 seconds.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1076]: #############################################################
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1077]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1079]: 256 SHA256:RdoWwMOSgw51mBD28FuLabo0FGe7XvtI9kDaq60uA5s root@ip-10-31-43-117.us-east-1.aws.redhat.com (ECDSA)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1085]: 256 SHA256:v1uLzZ9r22pXF2QsO+gHOGERom4ErsGOiGgKtl7LE5E root@ip-10-31-43-117.us-east-1.aws.redhat.com (ED25519)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1090]: 3072 SHA256:4kPeOc6dyUInlbtyxVsYnSTAiNqxtK7A/xVLG9s/emE root@ip-10-31-43-117.us-east-1.aws.redhat.com (RSA)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1092]: -----END SSH HOST KEY FINGERPRINTS-----
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1095]: #############################################################
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1020]: Cloud-init v. 24.1.4-21.el10 finished at Sat, 14 Dec 2024 16:26:41 +0000. Datasource DataSourceEc2Local.  Up 17.88 seconds
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts.
░░ Subject: A start job for unit cloud-final.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has finished successfully.
░░ 
░░ The job identifier is 278.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target.
░░ Subject: A start job for unit cloud-init.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.target has finished successfully.
░░ 
░░ The job identifier is 273.
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 0 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 0 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 48 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 48 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 49 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 49 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 50 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 50 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 51 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 51 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 52 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 52 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 53 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 53 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 54 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 54 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 55 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 55 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 56 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 56 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 57 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 57 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 58 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 58 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 59 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 59 affinity is now unmanaged
Dec 14 11:26:44 ip-10-31-43-117.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated.
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Selected source 10.2.32.38
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Rebuilding /boot/initramfs-6.12.0-31.el10.x86_64kdump.img
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1384]: dracut-103-1.el10
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1387]: Executing: /usr/bin/dracut --list-modules
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1458]: dracut-103-1.el10
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics  --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/f3bb1e80-fac3-4b5e-93f6-d763469176c6 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.12.0-31.el10.x86_64kdump.img 6.12.0-31.el10.x86_64
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'ifcfg' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'plymouth' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'resume' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'earlykdump' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fips ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fips-crypto-policies ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-ask-password ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-initrd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-journald ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-modules-load ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-sysctl ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-sysusers ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-tmpfiles ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-udevd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: rngd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: i18n ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: drm ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: prefixdevname ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kernel-modules ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kernel-modules-extra ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: configuration source "/run/depmod.d" does not exist
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: configuration source "/lib/depmod.d" does not exist
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf"
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: pcmcia ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Skipping udev rule: 60-pcmcia.rules
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fstab-sys ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: hwdb ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: rootfs-block ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: squash-squashfs ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: terminfo ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: udev-rules ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: dracut-systemd ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: usrmount ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: base ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fs-lib ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kdumpbase ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: memstrack ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: microcode_ctl-fw_dir_override ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   microcode_ctl module: mangling fw_dir
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware"
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel"...
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:       microcode_ctl: intel: caveats check for kernel version "6.12.0-31.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"...
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: configuration "intel-06-4f-01" is ignored
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware"
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: shutdown ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: squash-lib ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including modules done ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Installing kernel module dependencies ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Installing kernel module dependencies done ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Resolving executable dependencies ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Resolving executable dependencies done ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Hardlinking files ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Mode:                     real
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Method:                   sha256
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Files:                    537
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Linked:                   25 files
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Compared:                 0 xattrs
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Compared:                 48 files
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Saved:                    13.58 MiB
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Duration:                 0.163206 seconds
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Hardlinking files done ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Generating early-microcode cpio image ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Constructing GenuineIntel.bin ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Constructing GenuineIntel.bin ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Store current command line parameters ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Stored kernel commandline:
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: No dracut internal kernel commandline stored in the initramfs
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Squashing the files inside the initramfs ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Squashing the files inside the initramfs done ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Creating image file '/boot/initramfs-6.12.0-31.el10.x86_64kdump.img' ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Creating initramfs image file '/boot/initramfs-6.12.0-31.el10.x86_64kdump.img' done ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: kexec: loaded kdump kernel
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Starting kdump: [OK]
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Notice: No vmcore creation test performed!
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming.
░░ Subject: A start job for unit kdump.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has finished successfully.
░░ 
░░ The job identifier is 256.
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.005s (kernel) + 3.724s (initrd) + 31.760s (userspace) = 36.490s.
░░ Subject: System start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ All system services necessary queued for starting at boot have been
░░ started. Note that this does not mean that the machine is now idle as services
░░ might still be busy with completing start-up.
░░ 
░░ Kernel start-up required 1005350 microseconds.
░░ 
░░ Initrd start-up required 3724049 microseconds.
░░ 
░░ Userspace start-up required 31760904 microseconds.
Dec 14 11:27:08 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Dec 14 11:27:51 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Selected source 216.66.48.42 (2.centos.pool.ntp.org)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: Accepted publickey for root from 10.30.34.106 port 52592 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4322) opened.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-0.slice has finished successfully.
░░ 
░░ The job identifier is 602.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░ 
░░ The job identifier is 601.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 1 of user root.
░░ Subject: A new session 1 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 1 has been created for the user root.
░░ 
░░ The leading process of the session is 4322.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░ 
░░ The job identifier is 601.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has begun execution.
░░ 
░░ The job identifier is 681.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 2 of user root.
░░ Subject: A new session 2 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 2 has been created for the user root.
░░ 
░░ The leading process of the session is 4327.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com (systemd)[4327]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Queued start job for default target default.target.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 5.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 10.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 11.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 12.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 9.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 4.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 8.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 8.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 4.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 3.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 2.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 1.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Startup finished in 127ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░ 
░░ Startup of the manager took 127888 microseconds.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has finished successfully.
░░ 
░░ The job identifier is 681.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root.
░░ Subject: A start job for unit session-1.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-1.scope has finished successfully.
░░ 
░░ The job identifier is 762.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4338]: Received disconnect from 10.30.34.106 port 52592:11: disconnected by user
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4338]: Disconnected from user root 10.30.34.106 port 52592
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4322) opened.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_unix(sshd:session): session closed for user root
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-1.scope has successfully entered the 'dead' state.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Session 1 logged out. Waiting for processes to exit.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Removed session 1.
░░ Subject: Session 1 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 1 has been terminated.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: Accepted publickey for root from 10.31.8.152 port 43942 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: Accepted publickey for root from 10.31.8.152 port 43954 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4376) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4377) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 3 of user root.
░░ Subject: A new session 3 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 3 has been created for the user root.
░░ 
░░ The leading process of the session is 4376.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root.
░░ Subject: A start job for unit session-3.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-3.scope has finished successfully.
░░ 
░░ The job identifier is 844.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 4 of user root.
░░ Subject: A new session 4 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 4 has been created for the user root.
░░ 
░░ The leading process of the session is 4377.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root.
░░ Subject: A start job for unit session-4.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-4.scope has finished successfully.
░░ 
░░ The job identifier is 926.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4383]: Received disconnect from 10.31.8.152 port 43954:11: disconnected by user
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4383]: Disconnected from user root 10.31.8.152 port 43954
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4377) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_unix(sshd:session): session closed for user root
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-4.scope has successfully entered the 'dead' state.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Session 4 logged out. Waiting for processes to exit.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Removed session 4.
░░ Subject: Session 4 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 4 has been terminated.
Dec 14 11:29:28 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 1008.
Dec 14 11:29:28 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 1008.
Dec 14 11:29:28 managed-node1 systemd-hostnamed[5857]: Hostname set to <managed-node1> (static)
Dec 14 11:29:28 managed-node1 NetworkManager[703]: <info>  [1734193768.6492] hostname: static hostname changed from "ip-10-31-43-117.us-east-1.aws.redhat.com" to "managed-node1"
Dec 14 11:29:28 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 1086.
Dec 14 11:29:28 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 1086.
Dec 14 11:29:38 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:29:58 managed-node1 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Dec 14 11:30:05 managed-node1 sshd-session[6523]: Accepted publickey for root from 10.31.13.174 port 42640 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Dec 14 11:30:05 managed-node1 sshd-session[6523]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-6523) opened.
Dec 14 11:30:05 managed-node1 systemd-logind[653]: New session 5 of user root.
░░ Subject: A new session 5 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 5 has been created for the user root.
░░ 
░░ The leading process of the session is 6523.
Dec 14 11:30:05 managed-node1 systemd[1]: Started session-5.scope - Session 5 of User root.
░░ Subject: A start job for unit session-5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-5.scope has finished successfully.
░░ 
░░ The job identifier is 1165.
Dec 14 11:30:05 managed-node1 sshd-session[6523]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:30:06 managed-node1 python3.12[6679]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Dec 14 11:30:08 managed-node1 python3.12[6839]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:30:08 managed-node1 python3.12[6970]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:30:10 managed-node1 sudo[7232]:     root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qkiobsjfqecahznjrwohrybwqhyobeje ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1734193809.9793763-6984-204031990791383/AnsiballZ_dnf.py'
Dec 14 11:30:10 managed-node1 sudo[7232]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-7232) opened.
Dec 14 11:30:10 managed-node1 sudo[7232]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:30:10 managed-node1 python3.12[7235]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:30:27 managed-node1 kernel: SELinux:  Converting 384 SID table entries...
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  Converting 385 SID table entries...
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  Converting 385 SID table entries...
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:43 managed-node1 setsebool[7309]: The virt_use_nfs policy boolean was changed to 1 by root
Dec 14 11:30:43 managed-node1 setsebool[7309]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Dec 14 11:30:52 managed-node1 kernel: SELinux:  Converting 388 SID table entries...
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  Converting 388 SID table entries...
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:31:17 managed-node1 systemd[1]: Started run-rdcb31fbbad404dfd86db5482f938d0b1.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has finished successfully.
░░ 
░░ The job identifier is 1247.
Dec 14 11:31:17 managed-node1 systemd[1]: Reload requested from client PID 8034 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:17 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:17 managed-node1 systemd[1]: Reloading finished in 190 ms.
Dec 14 11:31:17 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1325.
Dec 14 11:31:17 managed-node1 systemd[1]: Queuing reload/restart jobs for marked units…
Dec 14 11:31:18 managed-node1 sudo[7232]: pam_unix(sudo:session): session closed for user root
Dec 14 11:31:18 managed-node1 python3.12[8229]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:19 managed-node1 python3.12[8367]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Dec 14 11:31:19 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:19 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1325.
Dec 14 11:31:19 managed-node1 systemd[1]: run-rdcb31fbbad404dfd86db5482f938d0b1.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has successfully entered the 'dead' state.
Dec 14 11:31:20 managed-node1 python3.12[8503]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:22 managed-node1 python3.12[8636]: ansible-tempfile Invoked with prefix=lsr_ suffix=_podman state=directory path=None
Dec 14 11:31:22 managed-node1 python3.12[8767]: ansible-file Invoked with path=/tmp/lsr_6ehua9m0_podman/auth state=directory mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:23 managed-node1 python3.12[8898]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:24 managed-node1 python3.12[9029]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:26 managed-node1 python3.12[9165]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically this happens
░░ when installing or removing third-party software adds or removes D-Bus
░░ configuration files.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically this happens
░░ when installing or removing third-party software adds or removes D-Bus
░░ configuration files.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically this happens
░░ when installing or removing third-party software adds or removes D-Bus
░░ configuration files.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 systemd[1]: Reload requested from client PID 9173 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:27 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading finished in 183 ms.
Dec 14 11:31:28 managed-node1 systemd[1]: Started run-r3d4cf19d1fc24d23b770a4063f70f37f.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has finished successfully.
░░ 
░░ The job identifier is 1407.
Dec 14 11:31:28 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1485.
Dec 14 11:31:28 managed-node1 systemd[1]: Reload requested from client PID 9234 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading finished in 291 ms.
Dec 14 11:31:28 managed-node1 systemd[1]: Queuing reload/restart jobs for marked units…
Dec 14 11:31:29 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:29 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1485.
Dec 14 11:31:29 managed-node1 systemd[1]: run-r3d4cf19d1fc24d23b770a4063f70f37f.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has successfully entered the 'dead' state.
Dec 14 11:31:29 managed-node1 python3.12[9425]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:29 managed-node1 python3.12[9556]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:30 managed-node1 python3.12[9687]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 14 11:31:30 managed-node1 systemd[1]: Reload requested from client PID 9690 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:30 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:30 managed-node1 systemd[1]: Reloading finished in 186 ms.
Dec 14 11:31:30 managed-node1 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab...
░░ Subject: A start job for unit fstrim.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.service has begun execution.
░░ 
░░ The job identifier is 1563.
Dec 14 11:31:31 managed-node1 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment...
░░ Subject: A start job for unit certmonger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has begun execution.
░░ 
░░ The job identifier is 1641.
Dec 14 11:31:31 managed-node1 (rtmonger)[9745]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS
Dec 14 11:31:31 managed-node1 systemd[1]: fstrim.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit fstrim.service has successfully entered the 'dead' state.
Dec 14 11:31:31 managed-node1 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab.
░░ Subject: A start job for unit fstrim.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.service has finished successfully.
░░ 
░░ The job identifier is 1563.
Dec 14 11:31:31 managed-node1 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment.
░░ Subject: A start job for unit certmonger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has finished successfully.
░░ 
░░ The job identifier is 1641.
Dec 14 11:31:31 managed-node1 python3.12[9904]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=podman_registry dns=['localhost', '127.0.0.1'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
                                                # Ansible managed
                                                #
                                                # system_role:certificate
                                                 provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
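
The certificate_request call logged above is the low-level module that the fedora.linux_system_roles.certificate role drives when the test asks for a self-signed registry certificate. As a rough sketch, and assuming the role's documented certificate_requests interface, the equivalent playbook variables would look roughly like this (name, SANs and key usages mirror the logged invocation):

    - name: Issue a self-signed TLS certificate for the test registry
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: podman_registry          # files land under /etc/pki/tls/certs and /etc/pki/tls/private
            dns:
              - localhost
              - 127.0.0.1
            ca: self-sign                  # certmonger's local self-signing CA
            key_usage:
              - digitalSignature
              - keyEncipherment
            extended_key_usage:
              - id-kp-serverAuth
              - id-kp-clientAuth
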
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9919]: Certificate in file "/etc/pki/tls/certs/podman_registry.crt" issued by CA and saved.
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 python3.12[10050]: ansible-slurp Invoked with path=/etc/pki/tls/certs/podman_registry.crt src=/etc/pki/tls/certs/podman_registry.crt
Dec 14 11:31:33 managed-node1 python3.12[10181]: ansible-slurp Invoked with path=/etc/pki/tls/private/podman_registry.key src=/etc/pki/tls/private/podman_registry.key
Dec 14 11:31:33 managed-node1 python3.12[10312]: ansible-slurp Invoked with path=/etc/pki/tls/certs/podman_registry.crt src=/etc/pki/tls/certs/podman_registry.crt
Dec 14 11:31:34 managed-node1 python3.12[10443]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/podman_registry.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:34 managed-node1 certmonger[9745]: 2024-12-14 11:31:34 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:34 managed-node1 python3.12[10575]: ansible-file Invoked with path=/etc/pki/tls/certs/podman_registry.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:34 managed-node1 python3.12[10706]: ansible-file Invoked with path=/etc/pki/tls/private/podman_registry.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:35 managed-node1 python3.12[10837]: ansible-file Invoked with path=/etc/pki/tls/certs/podman_registry.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:35 managed-node1 python3.12[10968]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:36 managed-node1 python3.12[11073]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193895.4098308-8404-275762393983029/.source.crt _original_basename=.fdte2xv6 follow=False checksum=a56753cb72985d5015d277aab9534d583f3099c2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:36 managed-node1 python3.12[11204]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/registry_key.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:36 managed-node1 python3.12[11309]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/registry_key.pem mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193896.2446203-8452-271283031401177/.source.pem _original_basename=.b248p56a follow=False checksum=3c4bd2383044d864f778448dd3788c2bdf7f63a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:37 managed-node1 python3.12[11440]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:37 managed-node1 python3.12[11545]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/ca.crt mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193896.9596615-8488-86842030413265/.source.crt _original_basename=.5xr0fb34 follow=False checksum=a56753cb72985d5015d277aab9534d583f3099c2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:38 managed-node1 python3.12[11676]: ansible-ansible.legacy.dnf Invoked with name=['httpd-tools', 'skopeo'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:42 managed-node1 systemd[1]: Started run-ra2bab39c1da445c09f883f3d116af994.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-ra2bab39c1da445c09f883f3d116af994.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-ra2bab39c1da445c09f883f3d116af994.service has finished successfully.
░░ 
░░ The job identifier is 1720.
Dec 14 11:31:42 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1798.
Dec 14 11:31:42 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:42 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1798.
Dec 14 11:31:42 managed-node1 systemd[1]: run-ra2bab39c1da445c09f883f3d116af994.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-ra2bab39c1da445c09f883f3d116af994.service has successfully entered the 'dead' state.
Dec 14 11:31:43 managed-node1 python3.12[12190]: ansible-ansible.legacy.command Invoked with _raw_params=podman run -d -p 127.0.0.1:5000:5000 --name podman_registry -v /tmp/lsr_6ehua9m0_podman/auth:/auth:Z -e REGISTRY_AUTH=htpasswd -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd -e REGISTRY_HTTP_TLS_CERTIFICATE=/auth/registry_cert.crt -e REGISTRY_HTTP_TLS_KEY=/auth/registry_key.pem quay.io/libpod/registry:2.8.2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
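
The registry container started above expects an htpasswd credential file at /auth/htpasswd, i.e. inside the bind-mounted /tmp/lsr_6ehua9m0_podman/auth directory; httpd-tools, installed a few steps earlier, provides the htpasswd utility used to create such a file. A minimal, hypothetical sketch with placeholder credentials (the real test credentials are vaulted and never appear in this log):

    - name: Create the htpasswd file consumed by the registry
      # -B bcrypt hashes, -b password on the command line, -c create the file;
      # registryuser/CHANGE_ME are placeholders, not the vaulted test credentials
      ansible.builtin.command:
        argv:
          - htpasswd
          - -Bbc
          - /tmp/lsr_6ehua9m0_podman/auth/htpasswd
          - registryuser
          - CHANGE_ME
        creates: /tmp/lsr_6ehua9m0_podman/auth/htpasswd
      no_log: true
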
Dec 14 11:31:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-compat1989105179-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-compat1989105179-merged.mount has successfully entered the 'dead' state.
Dec 14 11:31:44 managed-node1 kernel: evm: overlay not supported
Dec 14 11:31:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2016384165-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2016384165-merged.mount has successfully entered the 'dead' state.
Dec 14 11:31:44 managed-node1 podman[12191]: 2024-12-14 11:31:44.032711562 -0500 EST m=+0.082834242 system refresh
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.721997956 -0500 EST m=+1.772120507 volume create 174bec1e6ec18bfefedd3e5fc8a18b56102b9fb6012f8e02b781fd81b243eef3
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.701531903 -0500 EST m=+1.751654658 image pull 0030ba3d620c647159c935ee778991c68ef3e51a274703753b0bc530104ef5e5 quay.io/libpod/registry:2.8.2
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.732182371 -0500 EST m=+1.782304936 container create 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:45 managed-node1 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7779] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Dec 14 11:31:45 managed-node1 (udev-worker)[12280]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered blocking state
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered disabled state
Dec 14 11:31:45 managed-node1 kernel: veth0: entered allmulticast mode
Dec 14 11:31:45 managed-node1 kernel: veth0: entered promiscuous mode
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered blocking state
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered forwarding state
Dec 14 11:31:45 managed-node1 (udev-worker)[12198]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7970] device (veth0): carrier: link connected
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7973] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7998] device (podman0): carrier: link connected
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8066] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8071] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8078] device (podman0): Activation: starting connection 'podman0' (08e2f206-5ac2-4e2f-8306-ac90b232dcf4)
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8080] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8083] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8086] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8089] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 1877.
Dec 14 11:31:45 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 1877.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8659] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8662] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8667] device (podman0): Activation: successful, device activated.
Dec 14 11:31:46 managed-node1 systemd[1]: Created slice machine.slice - Slice /machine.
░░ Subject: A start job for unit machine.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit machine.slice has finished successfully.
░░ 
░░ The job identifier is 1957.
Dec 14 11:31:46 managed-node1 systemd[1]: Started libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope.
░░ Subject: A start job for unit libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully.
░░ 
░░ The job identifier is 1956.
Dec 14 11:31:46 managed-node1 systemd[1]: Started libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope - libcrun container.
░░ Subject: A start job for unit libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully.
░░ 
░░ The job identifier is 1962.
Dec 14 11:31:46 managed-node1 podman[12191]: 2024-12-14 11:31:46.056393753 -0500 EST m=+2.106516450 container init 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:46 managed-node1 podman[12191]: 2024-12-14 11:31:46.060003186 -0500 EST m=+2.110125831 container start 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:46 managed-node1 python3.12[12486]: ansible-wait_for Invoked with port=5000 host=127.0.0.1 timeout=300 connect_timeout=5 delay=0 active_connection_states=['ESTABLISHED', 'FIN_WAIT1', 'FIN_WAIT2', 'SYN_RECV', 'SYN_SENT', 'TIME_WAIT'] state=started sleep=1 path=None search_regex=None exclude_hosts=None msg=None
Dec 14 11:31:47 managed-node1 python3.12[12617]: ansible-ansible.legacy.command Invoked with _raw_params=podman logs podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:47 managed-node1 python3.12[12755]: ansible-ansible.legacy.command Invoked with _raw_params=podman pull quay.io/libpod/testimage:20210610; podman push --authfile="/tmp/lsr_6ehua9m0_podman/auth/auth.json" --cert-dir="/tmp/lsr_6ehua9m0_podman/auth" quay.io/libpod/testimage:20210610 docker://localhost:5000/libpod/testimage:20210610 _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
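
The push above authenticates against localhost:5000 through --authfile (a container auth.json) and verifies the registry's TLS certificate against --cert-dir. One common way to produce that auth.json beforehand is podman login; the sketch below uses placeholder credentials together with the paths from this run:

    - name: Log in to the local registry and record credentials in auth.json
      # placeholder credentials; --cert-dir points at the directory holding the
      # self-signed certificate so the TLS handshake can be verified
      ansible.builtin.command:
        argv:
          - podman
          - login
          - --authfile=/tmp/lsr_6ehua9m0_podman/auth/auth.json
          - --cert-dir=/tmp/lsr_6ehua9m0_podman/auth
          - --username=registryuser
          - --password=CHANGE_ME
          - localhost:5000
      no_log: true
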
Dec 14 11:31:49 managed-node1 podman[12757]: 2024-12-14 11:31:49.445577203 -0500 EST m=+1.809759385 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Dec 14 11:31:50 managed-node1 podman[12756]: 2024-12-14 11:31:49.47108663 -0500 EST m=+0.016538804 image push 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f docker://localhost:5000/libpod/testimage:20210610
Dec 14 11:31:50 managed-node1 python3.12[12918]: ansible-ansible.legacy.command Invoked with _raw_params=skopeo inspect --authfile="/tmp/lsr_6ehua9m0_podman/auth/auth.json" --cert-dir="/tmp/lsr_6ehua9m0_podman/auth" docker://localhost:5000/libpod/testimage:20210610 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:53 managed-node1 python3.12[13187]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:54 managed-node1 python3.12[13324]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:55 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:31:57 managed-node1 python3.12[13458]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:58 managed-node1 python3.12[13591]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:00 managed-node1 podman[13732]: 2024-12-14 11:32:00.132917471 -0500 EST m=+0.121086648 image pull-error  localhost:5000/libpod/testimage:20210610 initializing source docker://localhost:5000/libpod/testimage:20210610: reading manifest 20210610 in localhost:5000/libpod/testimage: authentication required
Dec 14 11:32:02 managed-node1 python3.12[14001]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:04 managed-node1 python3.12[14138]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:07 managed-node1 python3.12[14271]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:08 managed-node1 python3.12[14404]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:09 managed-node1 podman[14543]: 2024-12-14 11:32:09.204081254 -0500 EST m=+0.145458147 image pull-error  localhost:5000/libpod/testimage:20210610 initializing source docker://localhost:5000/libpod/testimage:20210610: pinging container registry localhost:5000: Get "https://localhost:5000/v2/": tls: failed to verify certificate: x509: certificate signed by unknown authority
Dec 14 11:32:12 managed-node1 python3.12[14812]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:14 managed-node1 python3.12[14949]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:16 managed-node1 python3.12[15082]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:17 managed-node1 python3.12[15215]: ansible-file Invoked with path=/etc/containers/certs.d/localhost:5000 state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
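
The directory created above, /etc/containers/certs.d/localhost:5000, is where podman and skopeo look for per-registry TLS material: dropping the CA certificate there lets pulls from that registry verify the self-signed server certificate, which is what clears the "certificate signed by unknown authority" error seen a few steps earlier. A minimal sketch of installing the CA, reusing the temporary auth directory from this run:

    - name: Trust the registry's self-signed CA for localhost:5000
      ansible.builtin.copy:
        src: /tmp/lsr_6ehua9m0_podman/auth/ca.crt
        dest: /etc/containers/certs.d/localhost:5000/ca.crt
        remote_src: true        # the CA file already exists on the managed node
        owner: root
        group: root
        mode: "0600"
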
Dec 14 11:32:20 managed-node1 python3.12[15582]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:21 managed-node1 python3.12[15715]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:22 managed-node1 podman[15856]: 2024-12-14 11:32:22.830406841 -0500 EST m=+0.198941135 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f localhost:5000/libpod/testimage:20210610
Dec 14 11:32:23 managed-node1 python3.12[16001]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:23 managed-node1 python3.12[16132]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:24 managed-node1 python3.12[16263]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:32:24 managed-node1 python3.12[16368]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734193943.8234994-10632-256821299067559/.source.yml _original_basename=.h1hpyflq follow=False checksum=fb0097683a2e5c8909a8037d64ddc1b350aed0be backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:25 managed-node1 python3.12[16499]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
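
The containers.podman.podman_play call above is how the role deploys the kube YAML it just wrote to /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml. A simplified task mirroring that invocation might look as follows; the credentials shown are placeholders standing in for the vaulted test values:

    - name: Play the kube file with registry credentials
      # username/password are placeholders standing in for the vaulted test values
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
        state: started
        username: registryuser
        password: CHANGE_ME
      no_log: true
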
Dec 14 11:32:25 managed-node1 python3.12[16643]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Remove all container resources - root] ***********************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:208
Saturday 14 December 2024  11:32:25 -0500 (0:00:00.517)       0:02:21.680 ***** 
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.150)       0:02:21.831 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.052)       0:02:21.883 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.038)       0:02:21.922 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.033)       0:02:21.955 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.037)       0:02:21.992 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.050)       0:02:22.043 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.041)       0:02:22.085 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:32:26 -0500 (0:00:00.192)       0:02:22.278 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.989)       0:02:23.267 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.055)       0:02:23.322 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.125)       0:02:23.447 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.090)       0:02:23.538 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.102)       0:02:23.641 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:32:27 -0500 (0:00:00.055)       0:02:23.696 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.024388",
    "end": "2024-12-14 11:32:28.317650",
    "rc": 0,
    "start": "2024-12-14 11:32:28.293262"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:32:28-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.452)       0:02:24.149 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.034)       0:02:24.183 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.084)       0:02:24.268 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.037)       0:02:24.306 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.038)       0:02:24.344 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.055)       0:02:24.399 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.078)       0:02:24.478 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.115)       0:02:24.594 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:28 -0500 (0:00:00.096)       0:02:24.691 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.089)       0:02:24.780 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.082)       0:02:24.863 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.423)       0:02:25.287 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.034)       0:02:25.321 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.031)       0:02:25.352 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.034)       0:02:25.387 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.031)       0:02:25.418 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.033)       0:02:25.451 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.032)       0:02:25.483 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.050)       0:02:25.533 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.085)       0:02:25.619 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
        "__podman_policy_json_file": "/etc/containers/policy.json",
        "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
        "__podman_storage_conf_file": "/etc/containers/storage.conf"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 14 December 2024  11:32:29 -0500 (0:00:00.090)       0:02:25.709 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.224)       0:02:25.934 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.053)       0:02:25.988 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_containers_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.089)       0:02:26.078 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.123)       0:02:26.202 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.061)       0:02:26.264 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_registries_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.041)       0:02:26.305 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.099)       0:02:26.405 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.049)       0:02:26.454 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_storage_conf | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.040)       0:02:26.495 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.068)       0:02:26.563 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.032)       0:02:26.595 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.034)       0:02:26.630 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.052)       0:02:26.682 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_policy_json | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 14 December 2024  11:32:30 -0500 (0:00:00.040)       0:02:26.722 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_firewall | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.049)       0:02:26.772 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_selinux_ports | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.056)       0:02:26.828 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.055)       0:02:26.883 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Check given registry_host] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:3
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.171)       0:02:27.055 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_cert_spec_item[\"registry_host\"] is search(\"/\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:10
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.053)       0:02:27.109 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:14
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.054)       0:02:27.163 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:18
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.055)       0:02:27.219 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.147)       0:02:27.366 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.112)       0:02:27.479 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.094)       0:02:27.574 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:31 -0500 (0:00:00.137)       0:02:27.711 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.511)       0:02:28.222 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.064)       0:02:28.287 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.069)       0:02:28.356 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.053)       0:02:28.410 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.050)       0:02:28.461 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.055)       0:02:28.516 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.052)       0:02:28.569 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.046)       0:02:28.615 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:23
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.049)       0:02:28.664 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user_home_dir": "/root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:27
Saturday 14 December 2024  11:32:32 -0500 (0:00:00.065)       0:02:28.730 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_certs_d_path": "/etc/containers/certs.d/localhost:5000"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:31
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.066)       0:02:28.796 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure certs.d directory] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:50
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.143)       0:02:28.940 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_cert_spec_item[\"state\"] | d(\"present\") == \"present\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:58
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.154)       0:02:29.095 ***** 
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:75
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.085)       0:02:29.180 ***** 
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Find files in certs.d directory] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:82
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.082)       0:02:29.263 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the certs.d directory is absent if empty] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:90
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.061)       0:02:29.324 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.053)       0:02:29.378 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set user and group] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:3
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.128)       0:02:29.507 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_credential_group": "",
        "__podman_credential_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:9
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.056)       0:02:29.564 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.091)       0:02:29.655 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.042)       0:02:29.698 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:33 -0500 (0:00:00.040)       0:02:29.739 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.045)       0:02:29.784 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.415)       0:02:30.200 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.052)       0:02:30.253 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.061)       0:02:30.315 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.072)       0:02:30.388 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.053)       0:02:30.441 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.051)       0:02:30.492 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.052)       0:02:30.545 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.048)       0:02:30.593 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set credential variables] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:15
Saturday 14 December 2024  11:32:34 -0500 (0:00:00.105)       0:02:30.699 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the credentials directory is present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:30
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.103)       0:02:30.803 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_credential_item[\"state\"] | d(\"present\") == \"present\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure credential file is copied] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:38
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.056)       0:02:30.859 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure credential file content is present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:48
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.053)       0:02:30.913 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure credential file is absent] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:65
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.054)       0:02:30.967 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Find files in credentials directory] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:71
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.053)       0:02:31.021 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the credentials directory is absent if empty] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:79
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.045)       0:02:31.066 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_handle_state == \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.036)       0:02:31.102 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.033)       0:02:31.136 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.103)       0:02:31.239 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_spec": {
            "state": "absent"
        },
        "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n    labels:\n        app: test\n        io.containers.autoupdate: registry\n    name: auth_test_1_kube\nspec:\n    containers:\n    -   image: localhost:5000/libpod/testimage:20210610\n        name: auth_test_1_kube\n"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.043)       0:02:31.283 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_kube": {
            "apiVersion": "v1",
            "kind": "Pod",
            "metadata": {
                "labels": {
                    "app": "test",
                    "io.containers.autoupdate": "registry"
                },
                "name": "auth_test_1_kube"
            },
            "spec": {
                "containers": [
                    {
                        "image": "localhost:5000/libpod/testimage:20210610",
                        "name": "auth_test_1_kube"
                    }
                ]
            }
        },
        "__podman_kube_file": "",
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.045)       0:02:31.328 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_name": "auth_test_1_kube",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.036)       0:02:31.365 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.060)       0:02:31.426 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.035)       0:02:31.461 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.036)       0:02:31.498 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:35 -0500 (0:00:00.044)       0:02:31.543 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.382)       0:02:31.925 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.081)       0:02:32.007 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.128)       0:02:32.135 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.051)       0:02:32.187 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.052)       0:02:32.239 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.050)       0:02:32.289 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.055)       0:02:32.345 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.055)       0:02:32.400 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.056)       0:02:32.457 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_kube",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.051)       0:02:32.508 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": true,
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.084)       0:02:32.593 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_path": "/etc/containers/ansible-kubernetes.d"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64
Saturday 14 December 2024  11:32:36 -0500 (0:00:00.087)       0:02:32.681 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68
Saturday 14 December 2024  11:32:37 -0500 (0:00:00.114)       0:02:32.795 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75
Saturday 14 December 2024  11:32:37 -0500 (0:00:00.099)       0:02:32.895 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "systemd-escape",
        "--template",
        "podman-kube@.service",
        "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    ],
    "delta": "0:00:00.005357",
    "end": "2024-12-14 11:32:37.529806",
    "rc": 0,
    "start": "2024-12-14 11:32:37.524449"
}

STDOUT:

podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service
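
Aside: the unit name above is the result of systemd template escaping, where "/" in the kube YAML path becomes "-", a literal "-" becomes "\x2d", and the escaped path is instantiated into the podman-kube@.service template. A minimal standalone task that reproduces the same mapping (a sketch only; __kube_unit_name is an illustrative variable, not one defined by the role) would be:

    - name: Derive the podman-kube unit name for a kube YAML file (sketch)
      ansible.builtin.command:
        argv:
          - systemd-escape
          - --template
          - podman-kube@.service
          - /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
      register: __kube_unit_name   # __kube_unit_name.stdout then holds the service name shown above
      changed_when: false          # querying systemd-escape never changes the host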

TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83
Saturday 14 December 2024  11:32:37 -0500 (0:00:00.518)       0:02:33.414 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:2
Saturday 14 December 2024  11:32:37 -0500 (0:00:00.139)       0:02:33.553 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:10
Saturday 14 December 2024  11:32:37 -0500 (0:00:00.053)       0:02:33.606 ***** 
ok: [managed-node1] => {
    "changed": false,
    "enabled": false,
    "failed_when_result": false,
    "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service",
    "state": "stopped",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "\"system-podman\\\\x2dkube.slice\" systemd-journald.socket sysinit.target -.mount basic.target network-online.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "A template for running K8s workloads via podman-kube-play",
        "DevicePolicy": "auto",
        "Documentation": "\"man:podman-kube-play(1)\"",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3698233344",
        "EffectiveMemoryMax": "3698233344",
        "EffectiveTasksMax": "22361",
        "Environment": "\"PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-auth_test_1_kube.yml.service\"",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "13976",
        "LimitNPROCSoft": "13976",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "13976",
        "LimitSIGPENDINGSoft": "13976",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3221000192",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "\"podman-kube@-etc-containers-ansible\\\\x2dkubernetes.d-auth_test_1_kube.yml.service\"",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "all",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "sysinit.target -.mount \"system-podman\\\\x2dkube.slice\"",
        "RequiresMountsFor": "/run/containers",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system-podman\\x2dkube.slice",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "22361",
        "TimeoutAbortUSec": "1min 10s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "1min 30s",
        "TimeoutStopFailureMode": "terminate",
        "TimeoutStopUSec": "1min 10s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "notify",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-online.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
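
The stop-and-disable above maps onto a single systemd task; a minimal sketch (assuming ansible.builtin.systemd and the unit name reported in the result; the role's own task at cleanup_kube_spec.yml:10 may differ in detail) would be:

    - name: Stop and disable the templated podman-kube unit (sketch)
      ansible.builtin.systemd:
        name: "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service"
        state: stopped
        enabled: false
      failed_when: false   # tolerate a unit that does not exist, mirroring failed_when_result above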

TASK [fedora.linux_system_roles.podman : Check if kube file exists] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:28
Saturday 14 December 2024  11:32:38 -0500 (0:00:00.685)       0:02:34.292 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193945.3178718,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "fb0097683a2e5c8909a8037d64ddc1b350aed0be",
        "ctime": 1734193944.4408734,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795802,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1734193944.1558738,
        "nlink": 1,
        "path": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 244,
        "uid": 0,
        "version": "925953677",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.podman : Remove pod/containers] ****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:38
Saturday 14 December 2024  11:32:39 -0500 (0:00:00.493)       0:02:34.786 ***** 
ok: [managed-node1] => {
    "actions": [
        "/usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    ],
    "changed": false,
    "failed_when_result": false
}

STDOUT:

Pods stopped:
Pods removed:
Secrets removed:
Volumes removed:



STDERR:

time="2024-12-14T11:32:39-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""


TASK [fedora.linux_system_roles.podman : Remove kubernetes yaml file] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:56
Saturday 14 December 2024  11:32:39 -0500 (0:00:00.593)       0:02:35.379 ***** 
changed: [managed-node1] => {
    "changed": true,
    "path": "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:61
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.500)       0:02:35.880 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_removed is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:70
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.058)       0:02:35.939 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.171)       0:02:36.110 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.053)       0:02:36.163 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.079)       0:02:36.243 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update containers and services] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.065)       0:02:36.308 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.079)       0:02:36.388 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.106)       0:02:36.494 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {
            "Container": {
                "ContainerName": "auth_test_1_quadlet",
                "Image": "localhost:5000/libpod/testimage:20210610"
            },
            "Install": {
                "WantedBy": "default.target"
            }
        },
        "__podman_quadlet_str": "",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.043)       0:02:36.537 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": true,
        "__podman_state": "absent",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.044)       0:02:36.582 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_quadlet_spec | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.049)       0:02:36.631 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_quadlet_name": "auth_test_1_quadlet",
        "__podman_quadlet_type": "container",
        "__podman_rootless": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 14 December 2024  11:32:40 -0500 (0:00:00.078)       0:02:36.710 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.114)       0:02:36.825 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.062)       0:02:36.887 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.113)       0:02:37.001 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.141)       0:02:37.142 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.419)       0:02:37.562 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.030)       0:02:37.593 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.032)       0:02:37.625 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.031)       0:02:37.657 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:41 -0500 (0:00:00.103)       0:02:37.760 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.049)       0:02:37.809 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.038)       0:02:37.848 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.042)       0:02:37.891 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.052)       0:02:37.943 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_activate_systemd_unit": false,
        "__podman_images_found": [
            "localhost:5000/libpod/testimage:20210610"
        ],
        "__podman_kube_yamls_raw": "",
        "__podman_service_name": "auth_test_1_quadlet.service",
        "__podman_systemd_scope": "system",
        "__podman_user_home_dir": "/root",
        "__podman_xdg_runtime_dir": "/run/user/0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.091)       0:02:38.034 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_quadlet_path": "/etc/containers/systemd"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.053)       0:02:38.088 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.048)       0:02:38.136 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_images": [
            "localhost:5000/libpod/testimage:20210610"
        ],
        "__podman_quadlet_file": "/etc/containers/systemd/auth_test_1_quadlet.container",
        "__podman_volumes": []
    },
    "changed": false
}
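
In this run __podman_state is "absent", so no quadlet file is rendered; for reference, the spec shown earlier (ContainerName, Image, WantedBy) would correspond to a unit at __podman_quadlet_file roughly as in the following hand-written sketch (using ansible.builtin.copy; the role's actual rendering task is not shown in this log):

    - name: Write the quadlet unit for auth_test_1_quadlet (sketch; not executed in this run)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/auth_test_1_quadlet.container
        content: |
          [Container]
          ContainerName=auth_test_1_quadlet
          Image=localhost:5000/libpod/testimage:20210610

          [Install]
          WantedBy=default.target
        mode: "0644"   # mode is an assumption; 0644 matches the kube file stat earlier in this log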

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.137)       0:02:38.274 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.105)       0:02:38.379 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.178)       0:02:38.558 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 14 December 2024  11:32:42 -0500 (0:00:00.050)       0:02:38.608 ***** 
ok: [managed-node1] => {
    "changed": false,
    "failed_when_result": false
}

MSG:

Could not find the requested service auth_test_1_quadlet.service: host

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 14 December 2024  11:32:43 -0500 (0:00:00.607)       0:02:39.216 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 14 December 2024  11:32:43 -0500 (0:00:00.483)       0:02:39.700 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_quadlet_stat.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 14 December 2024  11:32:43 -0500 (0:00:00.063)       0:02:39.764 ***** 
ok: [managed-node1] => {
    "changed": false,
    "path": "/etc/containers/systemd/auth_test_1_quadlet.container",
    "state": "absent"
}

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.412)       0:02:40.176 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_file_removed is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.038)       0:02:40.215 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.050)       0:02:40.265 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.081)       0:02:40.346 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_quadlet_parsed": null
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.058)       0:02:40.405 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_prune_images | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.048)       0:02:40.453 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.169)       0:02:40.622 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.066)       0:02:40.689 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.038)       0:02:40.727 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_rootless | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 14 December 2024  11:32:44 -0500 (0:00:00.036)       0:02:40.763 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_test_debug | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.035)       0:02:40.798 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_test_debug | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.044)       0:02:40.843 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_test_debug | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.053)       0:02:40.897 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_test_debug | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.055)       0:02:40.953 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.056)       0:02:41.009 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.057)       0:02:41.067 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_test_debug | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.089)       0:02:41.156 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_state != \"absent\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.049)       0:02:41.206 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.060)       0:02:41.267 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set user and group] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:3
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.179)       0:02:41.447 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_credential_group": "",
        "__podman_credential_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:9
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.081)       0:02:41.528 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:45 -0500 (0:00:00.173)       0:02:41.702 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.130)       0:02:41.832 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.086)       0:02:41.919 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.218)       0:02:42.138 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.424)       0:02:42.562 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.049)       0:02:42.611 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.048)       0:02:42.660 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:46 -0500 (0:00:00.052)       0:02:42.713 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.051)       0:02:42.764 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.055)       0:02:42.820 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.059)       0:02:42.879 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.088)       0:02:42.968 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set credential variables] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:15
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.090)       0:02:43.058 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the credentials directory is present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:30
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.151)       0:02:43.210 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_credential_item[\"state\"] | d(\"present\") == \"present\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure credential file is copied] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:38
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.102)       0:02:43.313 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure credential file content is present] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:48
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.111)       0:02:43.424 ***** 
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure credential file is absent] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:65
Saturday 14 December 2024  11:32:47 -0500 (0:00:00.112)       0:02:43.537 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Find files in credentials directory] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:71
Saturday 14 December 2024  11:32:48 -0500 (0:00:00.533)       0:02:44.070 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the credentials directory is absent if empty] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_credential_files.yml:79
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.740)       0:02:44.811 ***** 
ok: [managed-node1] => {
    "changed": false,
    "path": "/root/.config/containers",
    "state": "absent"
}
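
[Editor's note: the two results above show the credential cleanup pattern - files are removed first, the directory is scanned, and the per-user credentials directory (/root/.config/containers here) is deleted only if nothing is left. A minimal illustrative sketch of that pattern follows; it is not the role's actual source, and the variable names are assumptions.]

    # Sketch only: remove the credentials directory when it has become empty.
    - name: Find files in credentials directory
      ansible.builtin.find:
        paths: /root/.config/containers    # path taken from the log output above
      register: __found_creds

    - name: Ensure the credentials directory is absent if empty
      ansible.builtin.file:
        path: /root/.config/containers
        state: absent
      when: __found_creds.matched == 0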

TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.426)       0:02:45.237 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Check given registry_host] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:3
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.115)       0:02:45.353 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_cert_spec_item[\"registry_host\"] is search(\"/\")",
    "skip_reason": "Conditional result was False"
}
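
[Editor's note: the skipped check above gates on the condition shown in false_condition - the cert spec's registry_host must not contain a "/" path component. A hedged sketch of such a validation task, not the role's real code:]

    - name: Check given registry_host
      ansible.builtin.fail:
        msg: registry_host {{ __podman_cert_spec_item["registry_host"] }} must not contain "/"
      when: __podman_cert_spec_item["registry_host"] is search("/")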

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 0] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:10
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.032)       0:02:45.385 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user": "root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 1] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:14
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.035)       0:02:45.421 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_rootless": false
    },
    "changed": false
}
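
[Editor's note: __podman_rootless is false here because the resolved user is root. An illustrative guess at how that flag could be derived (assumption, not the role's source):]

    - name: Set per-cert spec variables part 1
      ansible.builtin.set_fact:
        __podman_rootless: "{{ __podman_user != 'root' }}"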

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:18
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.100)       0:02:45.522 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.084)       0:02:45.607 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.056)       0:02:45.663 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 14 December 2024  11:32:49 -0500 (0:00:00.060)       0:02:45.724 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_group": "0"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.087)       0:02:45.812 ***** 
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "atime": 1734193880.1628356,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 32,
        "charset": "binary",
        "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
        "ctime": 1734193861.1678128,
        "dev": 51714,
        "device_type": 0,
        "executable": true,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 8859182,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "application/x-pie-executable",
        "mode": "0755",
        "mtime": 1730678400.0,
        "nlink": 1,
        "path": "/usr/bin/getsubids",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 15744,
        "uid": 0,
        "version": "2878164177",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": true,
        "xoth": true,
        "xusr": true
    }
}
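
[Editor's note: the stat result above and the skips that follow show the subuid/subgid handling pattern - the role stats /usr/bin/getsubids once, then only queries it for non-root users (false_condition: __podman_user not in ["root", "0"]). A minimal sketch of that pattern, assuming it is acceptable to run getsubids via the command module; this is illustrative, not the role's actual tasks:]

    - name: See if getsubids exists
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __podman_stat_getsubids

    - name: Check with getsubids for user subuids
      ansible.builtin.command: getsubids {{ __podman_user }}
      register: __podman_subuids
      changed_when: false
      when:
        - __podman_stat_getsubids.stat.exists
        - __podman_user not in ["root", "0"]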

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.446)       0:02:46.259 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.053)       0:02:46.312 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.063)       0:02:46.376 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_user not in [\"root\", \"0\"]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.087)       0:02:46.463 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.063)       0:02:46.527 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.066)       0:02:46.593 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.067)       0:02:46.661 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Saturday 14 December 2024  11:32:50 -0500 (0:00:00.055)       0:02:46.717 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_stat_getsubids.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 2] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:23
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.056)       0:02:46.773 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_user_home_dir": "/root"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 3] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:27
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.049)       0:02:46.822 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_certs_d_path": "/etc/containers/certs.d/localhost:5000"
    },
    "changed": false
}
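
[Editor's note: for the root user the certs.d path resolves to /etc/containers/certs.d/<registry_host>, as shown above. A hedged sketch of how such a path could be composed; the rootless base directory under the user's home is an assumption, not confirmed by this log:]

    - name: Set per-cert spec variables part 3
      ansible.builtin.set_fact:
        __podman_certs_d_path: "{{ __certs_base ~ '/' ~ __podman_cert_spec_item['registry_host'] }}"
      vars:
        __certs_base: "{{ '/etc/containers/certs.d' if not __podman_rootless
          else __podman_user_home_dir ~ '/.config/containers/certs.d' }}"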

TASK [fedora.linux_system_roles.podman : Set per-cert spec variables part 4] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:31
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.051)       0:02:46.874 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure certs.d directory] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:50
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.080)       0:02:46.955 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_cert_spec_item[\"state\"] | d(\"present\") == \"present\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:58
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.086)       0:02:47.042 ***** 
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => (item=None)  => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Remove certs.d files] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:75
Saturday 14 December 2024  11:32:51 -0500 (0:00:00.114)       0:02:47.156 ***** 
ok: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
ok: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
changed: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}

TASK [fedora.linux_system_roles.podman : Find files in certs.d directory] ******
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:82
Saturday 14 December 2024  11:32:52 -0500 (0:00:01.222)       0:02:48.379 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the certs.d directory is absent if empty] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:90
Saturday 14 December 2024  11:32:53 -0500 (0:00:00.467)       0:02:48.847 ***** 
changed: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": true
}
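
[Editor's note: the "absent" branch above removes the individual certs.d files (three loop items, censored by no_log) and then deletes the directory once it is empty. A rough, illustrative sketch of that removal step; the file names in the loop are assumptions:]

    - name: Remove certs.d files
      ansible.builtin.file:
        path: "{{ __podman_certs_d_path }}/{{ item }}"
        state: absent
      loop:
        - ca.crt        # assumed names - the real loop items are hidden by no_log
        - client.cert
        - client.key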

TASK [Remove all container resources - rootless] *******************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:228
Saturday 14 December 2024  11:32:53 -0500 (0:00:00.628)       0:02:49.475 ***** 
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 14 December 2024  11:32:53 -0500 (0:00:00.144)       0:02:49.620 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 14 December 2024  11:32:53 -0500 (0:00:00.089)       0:02:49.709 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.067)       0:02:49.776 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.056)       0:02:49.833 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.040)       0:02:49.873 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.039)       0:02:49.913 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __podman_is_transactional is defined",
    "skip_reason": "Conditional result was False"
}
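
[Editor's note: both transactional-update tasks skip because __podman_is_transactional is already defined from an earlier pass. A minimal sketch of this detect-once pattern, reusing the exact condition shown in false_condition; the registered variable name is an assumption:]

    - name: Check if transactional-update exists in /sbin
      ansible.builtin.stat:
        path: /sbin/transactional-update
      register: __transactional_update_stat
      when: not __podman_is_transactional is defined

    - name: Set flag if transactional-update exists
      ansible.builtin.set_fact:
        __podman_is_transactional: "{{ __transactional_update_stat.stat.exists }}"
      when: not __podman_is_transactional is defined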

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.053)       0:02:49.967 ***** 
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
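
[Editor's note: the loop above loads distribution-specific vars files from most generic to most specific (RedHat.yml, CentOS.yml, CentOS_10.yml), skipping names that do not exist ("__vars_file is file") and letting later files override earlier ones - which is why __podman_packages ends up including iptables-nft. A hedged sketch of such a loader, not the role's exact task:]

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file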

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 14 December 2024  11:32:54 -0500 (0:00:00.105)       0:02:50.072 ***** 
ok: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.791)       0:02:50.864 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_use_copr | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.053)       0:02:50.918 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "(__podman_packages | difference(ansible_facts.packages))",
    "skip_reason": "Conditional result was False"
}
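
[Editor's note: the install task skips because every package in __podman_packages is already present in the gathered package facts. A minimal sketch of that gate, mirroring the condition shown above; illustrative only:]

    - name: Gather the package facts
      ansible.builtin.package_facts:

    - name: Ensure required packages are installed
      ansible.builtin.package:
        name: "{{ __podman_packages }}"
        state: present
      when: (__podman_packages | difference(ansible_facts.packages)) | length > 0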

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.059)       0:02:50.977 ***** 
skipping: [managed-node1] => {
    "false_condition": "__podman_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.051)       0:02:51.029 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.053)       0:02:51.082 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.052)       0:02:51.135 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "--version"
    ],
    "delta": "0:00:00.023917",
    "end": "2024-12-14 11:32:55.732122",
    "rc": 0,
    "start": "2024-12-14 11:32:55.708205"
}

STDOUT:

podman version 5.3.1


STDERR:

time="2024-12-14T11:32:55-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.529)       0:02:51.664 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "podman_version": "5.3.1"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 14 December 2024  11:32:55 -0500 (0:00:00.067)       0:02:51.732 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.2\", \"<\")",
    "skip_reason": "Conditional result was False"
}
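
[Editor's note: the version gates above compare the detected podman version (5.3.1, parsed from "podman --version") against minimum requirements using Ansible's "version" test, so all of them skip here. A hedged sketch of that pattern; the stdout parsing is an assumption based on the STDOUT shown earlier:]

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output
      changed_when: false

    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ (__podman_version_output.stdout.split())[2] }}"

    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: podman version {{ podman_version }} is too old - 4.2 or later is required
      when: podman_version is version("4.2", "<")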

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.061)       0:02:51.795 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "podman_version is version(\"4.4\", \"<\")",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.102)       0:02:51.897 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.075)       0:02:51.973 ***** 
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.106)       0:02:52.080 ***** 
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}

MSG:

end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.092)       0:02:52.172 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.126)       0:02:52.298 ***** 
ok: [managed-node1] => {
    "ansible_facts": {
        "getent_passwd": {
            "auth_test_user1": null
        }
    },
    "changed": false
}

MSG:

One or more supplied key could not be found in the database.

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 14 December 2024  11:32:56 -0500 (0:00:00.436)       0:02:52.735 ***** 
fatal: [managed-node1]: FAILED! => {
    "changed": false
}

MSG:

The given podman user [auth_test_user1] does not exist - cannot continue
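
[Editor's note: this failure is the role validating that the requested rootless user still exists before cleaning up its resources - getent returns null for auth_test_user1 (which was removed earlier in the test), and the follow-up task fails with the message above. A minimal illustrative sketch of that lookup-then-fail pair, using the conditions shown in the log; not the role's actual source:]

    - name: Get user information
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"
        fail_key: false
      when: "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']"

    - name: Fail if user does not exist
      ansible.builtin.fail:
        msg: The given podman user [{{ __podman_user }}] does not exist - cannot continue
      when: not ansible_facts["getent_passwd"][__podman_user]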


TASK [Check journal for rootless cleanup errors] *******************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:243
Saturday 14 December 2024  11:32:57 -0500 (0:00:00.069)       0:02:52.804 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.027442",
    "end": "2024-12-14 11:32:57.399515",
    "rc": 0,
    "start": "2024-12-14 11:32:57.372073"
}

STDOUT:

Dec 14 11:26:27 localhost systemd[1]: Stopped dracut-pre-udev.service - dracut pre-udev hook.
░░ Subject: A stop job for unit dracut-pre-udev.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit dracut-pre-udev.service has finished.
░░ 
░░ The job identifier is 105 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit dracut-cmdline.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped dracut-cmdline.service - dracut cmdline hook.
░░ Subject: A stop job for unit dracut-cmdline.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit dracut-cmdline.service has finished.
░░ 
░░ The job identifier is 113 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: Starting initrd-udevadm-cleanup-db.service - Cleanup udev Database...
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-udevadm-cleanup-db.service has begun execution.
░░ 
░░ The job identifier is 68.
Dec 14 11:26:27 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev.
░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-tmpfiles-setup-dev.service has finished.
░░ 
░░ The job identifier is 95 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-sysusers.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-sysusers.service - Create System Users.
░░ Subject: A stop job for unit systemd-sysusers.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-sysusers.service has finished.
░░ 
░░ The job identifier is 97 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: systemd-tmpfiles-setup-dev-early.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-tmpfiles-setup-dev-early.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully.
░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished.
░░ 
░░ The job identifier is 93 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit kmod-static-nodes.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Stopped kmod-static-nodes.service - Create List of Static Device Nodes.
░░ Subject: A stop job for unit kmod-static-nodes.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit kmod-static-nodes.service has finished.
░░ 
░░ The job identifier is 110 and the job result is done.
Dec 14 11:26:27 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state.
Dec 14 11:26:27 localhost systemd[1]: Finished initrd-udevadm-cleanup-db.service - Cleanup udev Database.
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully.
░░ 
░░ The job identifier is 68.
Dec 14 11:26:27 localhost systemd[1]: Reached target initrd-switch-root.target - Switch Root.
░░ Subject: A start job for unit initrd-switch-root.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-switch-root.target has finished successfully.
░░ 
░░ The job identifier is 67.
Dec 14 11:26:27 localhost systemd[1]: Starting initrd-switch-root.service - Switch Root...
░░ Subject: A start job for unit initrd-switch-root.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit initrd-switch-root.service has begun execution.
░░ 
░░ The job identifier is 82.
Dec 14 11:26:27 localhost systemd[1]: Switching root.
Dec 14 11:26:27 localhost systemd-journald[259]: Journal stopped
░░ Subject: The journal has been stopped
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The system journal process has shut down and closed all currently
░░ active journal files.
Dec 14 11:26:32 localhost systemd-journald[259]: Received SIGTERM from PID 1 (systemd).
Dec 14 11:26:32 localhost kernel: audit: type=1404 audit(1734193588.634:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability open_perms=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:26:32 localhost kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:26:32 localhost kernel: audit: type=1403 audit(1734193588.797:3): auid=4294967295 ses=4294967295 lsm=selinux res=1
Dec 14 11:26:32 localhost systemd[1]: Successfully loaded SELinux policy in 201.101ms.
Dec 14 11:26:32 localhost systemd[1]: Relabeled /dev/, /dev/shm/, /run/ in 17.621ms.
Dec 14 11:26:32 localhost systemd[1]: systemd 256-16.el10 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP -GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBCRYPTSETUP_PLUGINS +LIBFDISK +PCRE2 +PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT +LIBARCHIVE)
Dec 14 11:26:32 localhost systemd[1]: Detected virtualization xen.
Dec 14 11:26:32 localhost systemd[1]: Detected architecture x86-64.
Dec 14 11:26:32 localhost systemd[1]: Initializing machine ID from VM UUID.
Dec 14 11:26:32 localhost systemd[1]: Installed transient /etc/machine-id file.
Dec 14 11:26:32 localhost systemd[1]: bpf-restrict-fs: LSM BPF program attached
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2djournald.service.mount: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Stopped initrd-switch-root.service - Switch Root.
Dec 14 11:26:32 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-getty.slice - Slice /system/getty.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-serial\x2dgetty.slice - Slice /system/serial-getty.
Dec 14 11:26:32 localhost systemd[1]: Created slice system-sshd\x2dkeygen.slice - Slice /system/sshd-keygen.
Dec 14 11:26:32 localhost systemd[1]: Created slice user.slice - User and Session Slice.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch.
Dec 14 11:26:32 localhost systemd[1]: Set up automount proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point.
Dec 14 11:26:32 localhost systemd[1]: Expecting device dev-ttyS0.device - /dev/ttyS0...
Dec 14 11:26:32 localhost systemd[1]: Reached target cryptsetup.target - Local Encrypted Volumes.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-switch-root.target - Switch Root.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-fs.target - Initrd File Systems.
Dec 14 11:26:32 localhost systemd[1]: Stopped target initrd-root-fs.target - Initrd Root File System.
Dec 14 11:26:32 localhost systemd[1]: Reached target integritysetup.target - Local Integrity Protected Volumes.
Dec 14 11:26:32 localhost systemd[1]: Reached target paths.target - Path Units.
Dec 14 11:26:32 localhost systemd[1]: Reached target slices.target - Slice Units.
Dec 14 11:26:32 localhost systemd[1]: Reached target swap.target - Swaps.
Dec 14 11:26:32 localhost systemd[1]: Reached target veritysetup.target - Local Verity Protected Volumes.
Dec 14 11:26:32 localhost systemd[1]: Listening on dm-event.socket - Device-mapper event daemon FIFOs.
Dec 14 11:26:32 localhost systemd[1]: Listening on lvm2-lvmpolld.socket - LVM2 poll daemon socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on rpcbind.socket - RPCbind Server Activation Socket.
Dec 14 11:26:32 localhost systemd[1]: Reached target rpcbind.target - RPC Port Mapper.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-coredump.socket - Process Core Dump Socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-creds.socket - Credential Encryption/Decryption.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-initctl.socket - initctl Compatibility Named Pipe.
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrextend.socket - TPM PCR Measurements was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrlock.socket - Make TPM PCR Policy was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-udevd-control.socket - udev Control Socket.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-udevd-kernel.socket - udev Kernel Socket.
Dec 14 11:26:32 localhost systemd[1]: Mounting dev-hugepages.mount - Huge Pages File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting dev-mqueue.mount - POSIX Message Queue File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-kernel-debug.mount - Kernel Debug File System...
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-kernel-tracing.mount - Kernel Trace File System...
Dec 14 11:26:32 localhost systemd[1]: auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
Dec 14 11:26:32 localhost systemd[1]: fips-crypto-policy-overlay.service - Bind-mount FIPS crypto-policy in FIPS mode was skipped because of an unmet condition check (ConditionKernelCommandLine=fips=1).
Dec 14 11:26:32 localhost systemd[1]: Starting kmod-static-nodes.service - Create List of Static Device Nodes...
Dec 14 11:26:32 localhost systemd[1]: Starting lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@dm_mod.service - Load Kernel Module dm_mod...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@efi_pstore.service - Load Kernel Module efi_pstore...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@fuse.service - Load Kernel Module fuse...
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@loop.service - Load Kernel Module loop...
Dec 14 11:26:32 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Stopped systemd-fsck-root.service - File System Check on Root Device.
Dec 14 11:26:32 localhost systemd[1]: systemd-hibernate-clear.service - Clear Stale Hibernate Storage Info was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/HibernateLocation-8cf2644b-4b0b-428f-9387-6d876050dc67).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-journald.service - Journal Service...
Dec 14 11:26:32 localhost kernel: loop: module loaded
Dec 14 11:26:32 localhost kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log.
Dec 14 11:26:32 localhost kernel: device-mapper: uevent: version 1.0.3
Dec 14 11:26:32 localhost systemd[1]: systemd-modules-load.service - Load Kernel Modules was skipped because no trigger condition checks were met.
Dec 14 11:26:32 localhost kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@lists.linux.dev
Dec 14 11:26:32 localhost kernel: fuse: init (API version 7.41)
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-network-generator.service - Generate network units from Kernel command line...
Dec 14 11:26:32 localhost systemd[1]: systemd-pcrmachine.service - TPM PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-remount-fs.service - Remount Root and Kernel File Systems...
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-sysctl.service - Apply Kernel Variables...
Dec 14 11:26:32 localhost systemd[1]: systemd-tpm2-setup-early.service - Early TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udev-load-credentials.service - Load udev Rules from Credentials...
Dec 14 11:26:32 localhost systemd-journald[523]: Collecting audit messages is disabled.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udev-trigger.service - Coldplug All udev Devices...
Dec 14 11:26:32 localhost systemd[1]: Mounted dev-hugepages.mount - Huge Pages File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted dev-mqueue.mount - POSIX Message Queue File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-kernel-debug.mount - Kernel Debug File System.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-kernel-tracing.mount - Kernel Trace File System.
Dec 14 11:26:32 localhost systemd[1]: Finished kmod-static-nodes.service - Create List of Static Device Nodes.
Dec 14 11:26:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs.
Dec 14 11:26:32 localhost systemd[1]: modprobe@dm_mod.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@dm_mod.service - Load Kernel Module dm_mod.
Dec 14 11:26:32 localhost systemd[1]: modprobe@drm.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm.
Dec 14 11:26:32 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@efi_pstore.service - Load Kernel Module efi_pstore.
Dec 14 11:26:32 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse.
Dec 14 11:26:32 localhost systemd[1]: modprobe@loop.service: Deactivated successfully.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@loop.service - Load Kernel Module loop.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-network-generator.service - Generate network units from Kernel command line.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-remount-fs.service - Remount Root and Kernel File Systems.
Dec 14 11:26:32 localhost systemd-journald[523]: Journal started
░░ Subject: The journal has been started
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The system journal process has started up, opened the journal
░░ files for writing and is now ready to process requests.
Dec 14 11:26:32 localhost systemd-journald[523]: Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is 8M, max 70.5M, 62.5M free.
░░ Subject: Disk space used by the journal
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is currently using 8M.
░░ Maximum allowed usage is set to 70.5M.
░░ Leaving at least 35.2M free (of currently available 689.3M of disk space).
░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available.
░░ 
░░ The limits controlling how much disk space is used by the journal may
░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=,
░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in
░░ /etc/systemd/journald.conf. See journald.conf(5) for details.
Dec 14 11:26:31 localhost systemd[1]: Queued start job for default target multi-user.target.
Dec 14 11:26:31 localhost systemd[1]: systemd-journald.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-journald.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-journald.service - Journal Service.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-sysctl.service - Apply Kernel Variables.
░░ Subject: A start job for unit systemd-sysctl.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysctl.service has finished successfully.
░░ 
░░ The job identifier is 181.
Dec 14 11:26:32 localhost systemd[1]: systemd-hwdb-update.service - Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc).
░░ Subject: A start job for unit systemd-hwdb-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hwdb-update.service has finished successfully.
░░ 
░░ The job identifier is 177.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-journal-flush.service - Flush Journal to Persistent Storage...
░░ Subject: A start job for unit systemd-journal-flush.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-flush.service has begun execution.
░░ 
░░ The job identifier is 152.
Dec 14 11:26:32 localhost systemd[1]: systemd-pstore.service - Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore).
░░ Subject: A start job for unit systemd-pstore.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pstore.service has finished successfully.
░░ 
░░ The job identifier is 147.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-random-seed.service - Load/Save OS Random Seed...
░░ Subject: A start job for unit systemd-random-seed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-random-seed.service has begun execution.
░░ 
░░ The job identifier is 137.
Dec 14 11:26:32 localhost systemd[1]: systemd-repart.service - Repartition Root Disk was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-repart.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-repart.service has finished successfully.
░░ 
░░ The job identifier is 160.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully...
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has begun execution.
░░ 
░░ The job identifier is 198.
Dec 14 11:26:32 localhost systemd[1]: systemd-tpm2-setup.service - TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-tpm2-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tpm2-setup.service has finished successfully.
░░ 
░░ The job identifier is 151.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-udev-load-credentials.service - Load udev Rules from Credentials.
░░ Subject: A start job for unit systemd-udev-load-credentials.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udev-load-credentials.service has finished successfully.
░░ 
░░ The job identifier is 173.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-random-seed.service - Load/Save OS Random Seed.
░░ Subject: A start job for unit systemd-random-seed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-random-seed.service has finished successfully.
░░ 
░░ The job identifier is 137.
Dec 14 11:26:32 localhost systemd[1]: Finished lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling.
░░ Subject: A start job for unit lvm2-monitor.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit lvm2-monitor.service has finished successfully.
░░ 
░░ The job identifier is 186.
Dec 14 11:26:32 localhost systemd-journald[523]: Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is 8M, max 70.5M, 62.5M free.
░░ Subject: Disk space used by the journal
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ Runtime Journal (/run/log/journal/ec2a8c5f24a2db6683c20bfae8cc5947) is currently using 8M.
░░ Maximum allowed usage is set to 70.5M.
░░ Leaving at least 35.2M free (of currently available 689.3M of disk space).
░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available.
░░ 
░░ The limits controlling how much disk space is used by the journal may
░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=,
░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in
░░ /etc/systemd/journald.conf. See journald.conf(5) for details.
Dec 14 11:26:32 localhost systemd-journald[523]: Received client request to flush runtime journal.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-journal-flush.service - Flush Journal to Persistent Storage.
░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-flush.service has finished successfully.
░░ 
░░ The job identifier is 152.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-udev-trigger.service - Coldplug All udev Devices.
░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udev-trigger.service has finished successfully.
░░ 
░░ The job identifier is 185.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully.
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully.
░░ 
░░ The job identifier is 198.
Dec 14 11:26:32 localhost systemd[1]: systemd-sysusers.service - Create System Users was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-sysusers.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysusers.service has finished successfully.
░░ 
░░ The job identifier is 182.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev...
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution.
░░ 
░░ The job identifier is 145.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev.
░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully.
░░ 
░░ The job identifier is 145.
Dec 14 11:26:32 localhost systemd[1]: Reached target local-fs-pre.target - Preparation for Local File Systems.
░░ Subject: A start job for unit local-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit local-fs-pre.target has finished successfully.
░░ 
░░ The job identifier is 144.
Dec 14 11:26:32 localhost systemd[1]: Reached target local-fs.target - Local File Systems.
░░ Subject: A start job for unit local-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit local-fs.target has finished successfully.
░░ 
░░ The job identifier is 142.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-bootctl.socket - Boot Entries Service Socket.
░░ Subject: A start job for unit systemd-bootctl.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-bootctl.socket has finished successfully.
░░ 
░░ The job identifier is 213.
Dec 14 11:26:32 localhost systemd[1]: Listening on systemd-sysext.socket - System Extension Image Management.
░░ Subject: A start job for unit systemd-sysext.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysext.socket has finished successfully.
░░ 
░░ The job identifier is 220.
Dec 14 11:26:32 localhost systemd[1]: ldconfig.service - Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit ldconfig.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ldconfig.service has finished successfully.
░░ 
░░ The job identifier is 146.
Dec 14 11:26:32 localhost systemd[1]: selinux-autorelabel-mark.service - Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux).
░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit selinux-autorelabel-mark.service has finished successfully.
░░ 
░░ The job identifier is 190.
Dec 14 11:26:32 localhost systemd[1]: systemd-binfmt.service - Set Up Additional Binary Formats was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-binfmt.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-binfmt.service has finished successfully.
░░ 
░░ The job identifier is 193.
Dec 14 11:26:32 localhost systemd[1]: systemd-boot-random-seed.service - Update Boot Loader Random Seed was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-boot-random-seed.service has finished successfully.
░░ 
░░ The job identifier is 179.
Dec 14 11:26:32 localhost systemd[1]: systemd-confext.service - Merge System Configuration Images into /etc/ was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-confext.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-confext.service has finished successfully.
░░ 
░░ The job identifier is 157.
Dec 14 11:26:32 localhost systemd[1]: systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/ was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-sysext.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-sysext.service has finished successfully.
░░ 
░░ The job identifier is 189.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-tmpfiles-setup.service - Create System Files and Directories...
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup.service has begun execution.
░░ 
░░ The job identifier is 139.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-udevd.service - Rule-based Manager for Device Events and Files...
░░ Subject: A start job for unit systemd-udevd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udevd.service has begun execution.
░░ 
░░ The job identifier is 172.
Dec 14 11:26:32 localhost systemd-udevd[562]: Using default interface naming scheme 'rhel-10.0-beta'.
Dec 14 11:26:32 localhost systemd[1]: Started systemd-udevd.service - Rule-based Manager for Device Events and Files.
░░ Subject: A start job for unit systemd-udevd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-udevd.service has finished successfully.
░░ 
░░ The job identifier is 172.
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@fuse.service - Load Kernel Module fuse...
░░ Subject: A start job for unit modprobe@fuse.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@fuse.service has begun execution.
░░ 
░░ The job identifier is 294.
Dec 14 11:26:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs...
░░ Subject: A start job for unit modprobe@configfs.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@configfs.service has begun execution.
░░ 
░░ The job identifier is 302.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-tmpfiles-setup.service - Create System Files and Directories.
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully.
░░ 
░░ The job identifier is 139.
Dec 14 11:26:32 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit modprobe@fuse.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse.
░░ Subject: A start job for unit modprobe@fuse.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@fuse.service has finished successfully.
░░ 
░░ The job identifier is 294.
Dec 14 11:26:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit modprobe@configfs.service has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs.
░░ Subject: A start job for unit modprobe@configfs.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit modprobe@configfs.service has finished successfully.
░░ 
░░ The job identifier is 302.
Dec 14 11:26:32 localhost systemd[1]: Starting audit-rules.service - Load Audit Rules...
░░ Subject: A start job for unit audit-rules.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit audit-rules.service has begun execution.
░░ 
░░ The job identifier is 236.
Dec 14 11:26:32 localhost systemd[1]: Starting rpcbind.service - RPC Bind...
░░ Subject: A start job for unit rpcbind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpcbind.service has begun execution.
░░ 
░░ The job identifier is 253.
Dec 14 11:26:32 localhost systemd[1]: systemd-firstboot.service - First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes).
░░ Subject: A start job for unit systemd-firstboot.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-firstboot.service has finished successfully.
░░ 
░░ The job identifier is 180.
Dec 14 11:26:32 localhost systemd[1]: first-boot-complete.target - First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes).
░░ Subject: A start job for unit first-boot-complete.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit first-boot-complete.target has finished successfully.
░░ 
░░ The job identifier is 138.
Dec 14 11:26:32 localhost systemd[1]: systemd-journal-catalog-update.service - Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var).
░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-journal-catalog-update.service has finished successfully.
░░ 
░░ The job identifier is 131.
Dec 14 11:26:32 localhost systemd[1]: Starting systemd-machine-id-commit.service - Save Transient machine-id to Disk...
░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-machine-id-commit.service has begun execution.
░░ 
░░ The job identifier is 194.
Dec 14 11:26:32 localhost systemd[1]: systemd-update-done.service - Update is Completed was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit systemd-update-done.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-done.service has finished successfully.
░░ 
░░ The job identifier is 168.
Dec 14 11:26:32 localhost systemd[1]: Condition check resulted in dev-ttyS0.device - /dev/ttyS0 being skipped.
░░ Subject: A start job for unit dev-ttyS0.device has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dev-ttyS0.device has finished successfully.
░░ 
░░ The job identifier is 232.
Dec 14 11:26:32 localhost systemd[1]: Finished systemd-machine-id-commit.service - Save Transient machine-id to Disk.
░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-machine-id-commit.service has finished successfully.
░░ 
░░ The job identifier is 194.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dsysctl.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state.
Dec 14 11:26:32 localhost systemd[1]: Mounting sys-fs-fuse-connections.mount - FUSE Control File System...
░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sys-fs-fuse-connections.mount has begun execution.
░░ 
░░ The job identifier is 166.
Dec 14 11:26:32 localhost systemd[1]: Mounting var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System...
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution.
░░ 
░░ The job identifier is 247.
Dec 14 11:26:32 localhost systemd[1]: Mounted sys-fs-fuse-connections.mount - FUSE Control File System.
░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully.
░░ 
░░ The job identifier is 166.
Dec 14 11:26:32 localhost (udev-worker)[574]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:26:32 localhost kernel: RPC: Registered named UNIX socket transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered udp transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp-with-tls transport module.
Dec 14 11:26:32 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module.
Dec 14 11:26:32 localhost systemd[1]: Mounted var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System.
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully.
░░ 
░░ The job identifier is 247.
Dec 14 11:26:32 localhost systemd[1]: Reached target rpc_pipefs.target.
░░ Subject: A start job for unit rpc_pipefs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc_pipefs.target has finished successfully.
░░ 
░░ The job identifier is 246.
Dec 14 11:26:33 localhost augenrules[584]: /sbin/augenrules: No change
Dec 14 11:26:33 localhost augenrules[615]: No rules
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost augenrules[615]: enabled 0
Dec 14 11:26:33 localhost augenrules[615]: failure 1
Dec 14 11:26:33 localhost augenrules[615]: pid 0
Dec 14 11:26:33 localhost augenrules[615]: rate_limit 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_limit 8192
Dec 14 11:26:33 localhost augenrules[615]: lost 0
Dec 14 11:26:33 localhost augenrules[615]: backlog 0
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time 60000
Dec 14 11:26:33 localhost augenrules[615]: backlog_wait_time_actual 0
Dec 14 11:26:33 localhost systemd[1]: audit-rules.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit audit-rules.service has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost systemd[1]: Finished audit-rules.service - Load Audit Rules.
░░ Subject: A start job for unit audit-rules.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit audit-rules.service has finished successfully.
░░ 
░░ The job identifier is 236.
Dec 14 11:26:33 localhost systemd[1]: Starting auditd.service - Security Audit Logging Service...
░░ Subject: A start job for unit auditd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit auditd.service has begun execution.
░░ 
░░ The job identifier is 235.
Dec 14 11:26:33 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5
Dec 14 11:26:33 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console
Dec 14 11:26:33 localhost kernel: Console: switching to colour dummy device 80x25
Dec 14 11:26:33 localhost kernel: [drm] Initialized cirrus 2.0.0 for 0000:00:02.0 on minor 0
Dec 14 11:26:33 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device
Dec 14 11:26:33 localhost kernel: Console: switching to colour frame buffer device 128x48
Dec 14 11:26:33 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device
Dec 14 11:26:33 localhost systemd[1]: Started auditd.service - Security Audit Logging Service.
░░ Subject: A start job for unit auditd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit auditd.service has finished successfully.
░░ 
░░ The job identifier is 235.
Dec 14 11:26:33 localhost auditd[625]: No plugins found, not dispatching events
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-update-utmp.service - Record System Boot/Shutdown in UTMP...
░░ Subject: A start job for unit systemd-update-utmp.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp.service has begun execution.
░░ 
░░ The job identifier is 258.
Dec 14 11:26:33 localhost auditd[625]: Init complete, auditd 4.0 listening for events (startup state enable)
Dec 14 11:26:33 localhost systemd[1]: Started rpcbind.service - RPC Bind.
░░ Subject: A start job for unit rpcbind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpcbind.service has finished successfully.
░░ 
░░ The job identifier is 253.
Dec 14 11:26:33 localhost systemd[1]: Finished systemd-update-utmp.service - Record System Boot/Shutdown in UTMP.
░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp.service has finished successfully.
░░ 
░░ The job identifier is 258.
Dec 14 11:26:33 localhost systemd[1]: Reached target sysinit.target - System Initialization.
░░ Subject: A start job for unit sysinit.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sysinit.target has finished successfully.
░░ 
░░ The job identifier is 125.
Dec 14 11:26:33 localhost systemd[1]: Started dnf-makecache.timer - dnf makecache --timer.
░░ Subject: A start job for unit dnf-makecache.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dnf-makecache.timer has finished successfully.
░░ 
░░ The job identifier is 202.
Dec 14 11:26:33 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer
Dec 14 11:26:33 localhost systemd[1]: Started fstrim.timer - Discard unused filesystem blocks once a week.
░░ Subject: A start job for unit fstrim.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.timer has finished successfully.
░░ 
░░ The job identifier is 201.
Dec 14 11:26:33 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr
Dec 14 11:26:33 localhost systemd[1]: Started logrotate.timer - Daily rotation of log files.
░░ Subject: A start job for unit logrotate.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit logrotate.timer has finished successfully.
░░ 
░░ The job identifier is 209.
Dec 14 11:26:33 localhost systemd[1]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories.
░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully.
░░ 
░░ The job identifier is 210.
Dec 14 11:26:33 localhost systemd[1]: Reached target timers.target - Timer Units.
░░ Subject: A start job for unit timers.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit timers.target has finished successfully.
░░ 
░░ The job identifier is 200.
Dec 14 11:26:33 localhost systemd[1]: Listening on dbus.socket - D-Bus System Message Bus Socket.
░░ Subject: A start job for unit dbus.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus.socket has finished successfully.
░░ 
░░ The job identifier is 206.
Dec 14 11:26:33 localhost systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket.
░░ Subject: A start job for unit pcscd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit pcscd.socket has finished successfully.
░░ 
░░ The job identifier is 222.
Dec 14 11:26:33 localhost systemd[1]: Listening on sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket.
░░ Subject: A start job for unit sssd-kcm.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sssd-kcm.socket has finished successfully.
░░ 
░░ The job identifier is 214.
Dec 14 11:26:33 localhost systemd[1]: Listening on systemd-hostnamed.socket - Hostname Service Socket.
░░ Subject: A start job for unit systemd-hostnamed.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.socket has finished successfully.
░░ 
░░ The job identifier is 223.
Dec 14 11:26:33 localhost systemd[1]: Reached target sockets.target - Socket Units.
░░ Subject: A start job for unit sockets.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sockets.target has finished successfully.
░░ 
░░ The job identifier is 211.
Dec 14 11:26:33 localhost systemd[1]: Starting dbus-broker.service - D-Bus System Message Bus...
░░ Subject: A start job for unit dbus-broker.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus-broker.service has begun execution.
░░ 
░░ The job identifier is 207.
Dec 14 11:26:33 localhost systemd[1]: systemd-pcrphase-sysinit.service - TPM PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully.
░░ 
░░ The job identifier is 135.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup...
░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has begun execution.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup.
░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit systemd-vconsole-setup.service has finished.
░░ 
░░ The job identifier is 318 and the job result is done.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup...
░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has begun execution.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: Started dbus-broker.service - D-Bus System Message Bus.
░░ Subject: A start job for unit dbus-broker.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dbus-broker.service has finished successfully.
░░ 
░░ The job identifier is 207.
Dec 14 11:26:33 localhost systemd[1]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit basic.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit basic.target has finished successfully.
░░ 
░░ The job identifier is 122.
Dec 14 11:26:33 localhost dbus-broker-launch[637]: Ready
Dec 14 11:26:33 localhost systemd[1]: Starting chronyd.service - NTP client/server...
░░ Subject: A start job for unit chronyd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit chronyd.service has begun execution.
░░ 
░░ The job identifier is 268.
Dec 14 11:26:33 localhost systemd[1]: Starting cloud-init-local.service - Initial cloud-init job (pre-networking)...
░░ Subject: A start job for unit cloud-init-local.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init-local.service has begun execution.
░░ 
░░ The job identifier is 275.
Dec 14 11:26:33 localhost systemd[1]: Starting dracut-shutdown.service - Restore /run/initramfs on shutdown...
░░ Subject: A start job for unit dracut-shutdown.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dracut-shutdown.service has begun execution.
░░ 
░░ The job identifier is 184.
Dec 14 11:26:33 localhost systemd[1]: Started irqbalance.service - irqbalance daemon.
░░ Subject: A start job for unit irqbalance.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit irqbalance.service has finished successfully.
░░ 
░░ The job identifier is 234.
Dec 14 11:26:33 localhost systemd[1]: Started rngd.service - Hardware RNG Entropy Gatherer Daemon.
░░ Subject: A start job for unit rngd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rngd.service has finished successfully.
░░ 
░░ The job identifier is 259.
Dec 14 11:26:33 localhost systemd[1]: Starting rsyslog.service - System Logging Service...
░░ Subject: A start job for unit rsyslog.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rsyslog.service has begun execution.
░░ 
░░ The job identifier is 272.
Dec 14 11:26:33 localhost systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration).
░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ssh-host-keys-migration.service has finished successfully.
░░ 
░░ The job identifier is 267.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░ 
░░ The job identifier is 263.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░ 
░░ The job identifier is 265.
Dec 14 11:26:33 localhost systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░ 
░░ The job identifier is 266.
Dec 14 11:26:33 localhost systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen.target has finished successfully.
░░ 
░░ The job identifier is 262.
Dec 14 11:26:33 localhost systemd[1]: sssd.service - System Security Services Daemon was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit sssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sssd.service has finished successfully.
░░ 
░░ The job identifier is 237.
Dec 14 11:26:33 localhost systemd[1]: Reached target nss-user-lookup.target - User and Group Name Lookups.
░░ Subject: A start job for unit nss-user-lookup.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit nss-user-lookup.target has finished successfully.
░░ 
░░ The job identifier is 238.
Dec 14 11:26:33 localhost systemd[1]: Starting systemd-logind.service - User Login Management...
░░ Subject: A start job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-logind.service has begun execution.
░░ 
░░ The job identifier is 250.
Dec 14 11:26:33 localhost systemd[1]: Finished dracut-shutdown.service - Restore /run/initramfs on shutdown.
░░ Subject: A start job for unit dracut-shutdown.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit dracut-shutdown.service has finished successfully.
░░ 
░░ The job identifier is 184.
Dec 14 11:26:33 localhost (qbalance)[649]: irqbalance.service: Referenced but unset environment variable evaluates to an empty string: IRQBALANCE_ARGS
Dec 14 11:26:33 localhost systemd[1]: Started rsyslog.service - System Logging Service.
░░ Subject: A start job for unit rsyslog.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rsyslog.service has finished successfully.
░░ 
░░ The job identifier is 272.
Dec 14 11:26:33 localhost rsyslogd[651]: imjournal: filecreatemode is not set, using default 0644 [v8.2408.0-2.el10 try https://www.rsyslog.com/e/2186 ]
Dec 14 11:26:33 localhost rsyslogd[651]: [origin software="rsyslogd" swVersion="8.2408.0-2.el10" x-pid="651" x-info="https://www.rsyslog.com"] start
Dec 14 11:26:33 localhost systemd-logind[653]: New seat seat0.
░░ Subject: A new seat seat0 is now available
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new seat seat0 has been configured and is now available.
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event0 (Power Button)
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event1 (Sleep Button)
Dec 14 11:26:33 localhost systemd-logind[653]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard)
Dec 14 11:26:33 localhost systemd[1]: Started systemd-logind.service - User Login Management.
░░ Subject: A start job for unit systemd-logind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-logind.service has finished successfully.
░░ 
░░ The job identifier is 250.
Dec 14 11:26:33 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup.
░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-vconsole-setup.service has finished successfully.
░░ 
░░ The job identifier is 318.
Dec 14 11:26:33 localhost systemd[1]: run-credentials-systemd\x2dvconsole\x2dsetup.service.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-credentials-systemd\x2dvconsole\x2dsetup.service.mount has successfully entered the 'dead' state.
Dec 14 11:26:33 localhost rsyslogd[651]: imjournal: journal files changed, reloading...  [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ]
Dec 14 11:26:33 localhost rngd[650]: Disabling 7: PKCS11 Entropy generator (pkcs11)
Dec 14 11:26:33 localhost rngd[650]: Disabling 5: NIST Network Entropy Beacon (nist)
Dec 14 11:26:33 localhost rngd[650]: Disabling 9: Qrypt quantum entropy beacon (qrypt)
Dec 14 11:26:33 localhost rngd[650]: Disabling 10: Named pipe entropy input (namedpipe)
Dec 14 11:26:33 localhost rngd[650]: Initializing available sources
Dec 14 11:26:33 localhost rngd[650]: [hwrng ]: Initialization Failed
Dec 14 11:26:33 localhost rngd[650]: [rdrand]: Enabling RDRAND rng support
Dec 14 11:26:33 localhost rngd[650]: [rdrand]: Initialized
Dec 14 11:26:33 localhost rngd[650]: [jitter]: JITTER timeout set to 5 sec
Dec 14 11:26:33 localhost chronyd[664]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG)
Dec 14 11:26:33 localhost rngd[650]: [jitter]: Initializing AES buffer
Dec 14 11:26:33 localhost chronyd[664]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift
Dec 14 11:26:33 localhost systemd[1]: Started chronyd.service - NTP client/server.
░░ Subject: A start job for unit chronyd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit chronyd.service has finished successfully.
░░ 
░░ The job identifier is 268.
Dec 14 11:26:33 localhost chronyd[664]: Loaded seccomp filter (level 2)
Dec 14 11:26:36 localhost cloud-init[671]: Cloud-init v. 24.1.4-21.el10 running 'init-local' at Sat, 14 Dec 2024 16:26:36 +0000. Up 12.60 seconds.
Dec 14 11:26:36 localhost dhcpcd[673]: dhcpcd-10.0.6 starting
Dec 14 11:26:36 localhost kernel: 8021q: 802.1Q VLAN Support v1.8
Dec 14 11:26:36 localhost systemd[1]: Listening on systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch.
░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-rfkill.socket has finished successfully.
░░ 
░░ The job identifier is 328.
Dec 14 11:26:37 localhost kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database
Dec 14 11:26:37 localhost kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7'
Dec 14 11:26:37 localhost kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600'
Dec 14 11:26:37 localhost dhcpcd[676]: DUID 00:01:00:01:2e:f0:6e:3d:0e:03:6a:4a:4d:55
Dec 14 11:26:37 localhost dhcpcd[676]: eth0: IAID 6a:4a:4d:55
Dec 14 11:26:37 localhost kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2
Dec 14 11:26:37 localhost kernel: cfg80211: failed to load regulatory.db
Dec 14 11:26:38 localhost rngd[650]: [jitter]: Unable to obtain AES key, disabling JITTER source
Dec 14 11:26:38 localhost rngd[650]: [jitter]: Initialization Failed
Dec 14 11:26:38 localhost rngd[650]: Process privileges have been dropped to 2:2
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: soliciting a DHCP lease
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: offered 10.31.43.117 from 10.31.40.1
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: leased 10.31.43.117 for 3600 seconds
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: adding route to 10.31.40.0/22
Dec 14 11:26:38 localhost dhcpcd[676]: eth0: adding default route via 10.31.40.1
Dec 14 11:26:38 localhost dhcpcd[676]: control command: /usr/sbin/dhcpcd --dumplease --ipv4only eth0
Dec 14 11:26:38 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 337.
Dec 14 11:26:38 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 337.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-hostnamed[696]: Hostname set to <ip-10-31-43-117.us-east-1.aws.redhat.com> (static)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking).
░░ Subject: A start job for unit cloud-init-local.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init-local.service has finished successfully.
░░ 
░░ The job identifier is 275.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network-pre.target - Preparation for Network.
░░ Subject: A start job for unit network-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-pre.target has finished successfully.
░░ 
░░ The job identifier is 156.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager.service - Network Manager...
░░ Subject: A start job for unit NetworkManager.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has begun execution.
░░ 
░░ The job identifier is 205.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7758] NetworkManager (version 1.51.4-1.el10) is starting... (boot:38eff4b5-157f-400c-9c9a-01c5bd7302d2)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7760] Read config: /etc/NetworkManager/NetworkManager.conf, /etc/NetworkManager/conf.d/30-cloud-init-ip6-addr-gen-mode.conf
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7889] manager[0x557f01cc5a10]: monitoring kernel firmware directory '/lib/firmware'.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7922] hostname: hostname: using hostnamed
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7922] hostname: static hostname changed from (none) to "ip-10-31-43-117.us-east-1.aws.redhat.com"
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7926] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7930] manager[0x557f01cc5a10]: rfkill: Wi-Fi hardware radio set enabled
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7930] manager[0x557f01cc5a10]: rfkill: WWAN hardware radio set enabled
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7986] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7987] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.7987] manager: Networking is enabled by state file
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8012] settings: Loaded settings plugin: keyfile (internal)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 415.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8087] dhcp: init: Using DHCP client 'internal'
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8090] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8101] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8125] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8131] device (lo): Activation: starting connection 'lo' (77f275e6-4c01-4392-ab9b-e140983cfde9)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8137] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8141] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager.service - Network Manager.
░░ Subject: A start job for unit NetworkManager.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager.service has finished successfully.
░░ 
░░ The job identifier is 205.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8174] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network.target - Network.
░░ Subject: A start job for unit network.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network.target has finished successfully.
░░ 
░░ The job identifier is 208.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8196] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8198] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8200] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8201] device (eth0): carrier: link connected
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8203] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8218] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online...
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has begun execution.
░░ 
░░ The job identifier is 204.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8237] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8242] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8245] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8248] manager: NetworkManager state is now CONNECTING
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8252] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8261] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8272] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon...
░░ Subject: A start job for unit gssproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has begun execution.
░░ 
░░ The job identifier is 244.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.8299] dhcp4 (eth0): state changed new lease, address=10.31.43.117, acd pending
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon.
░░ Subject: A start job for unit gssproxy.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit gssproxy.service has finished successfully.
░░ 
░░ The job identifier is 244.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
░░ Subject: A start job for unit rpc-gssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-gssd.service has finished successfully.
░░ 
░░ The job identifier is 245.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target nfs-client.target - NFS client services.
░░ Subject: A start job for unit nfs-client.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit nfs-client.target has finished successfully.
░░ 
░░ The job identifier is 241.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems.
░░ Subject: A start job for unit remote-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs-pre.target has finished successfully.
░░ 
░░ The job identifier is 249.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes.
░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-cryptsetup.target has finished successfully.
░░ 
░░ The job identifier is 260.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs.target - Remote File Systems.
░░ Subject: A start job for unit remote-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit remote-fs.target has finished successfully.
░░ 
░░ The job identifier is 271.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki).
░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-pcrphase.service has finished successfully.
░░ 
░░ The job identifier is 170.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 415.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9503] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9512] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9539] device (lo): Activation: successful, device activated.
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9964] dhcp4 (eth0): state changed new lease, address=10.31.43.117
Dec 14 11:26:38 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193598.9975] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0371] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0438] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0445] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0450] manager: NetworkManager state is now CONNECTED_SITE
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0460] device (eth0): Activation: successful, device activated.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0466] manager: NetworkManager state is now CONNECTED_GLOBAL
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com NetworkManager[703]: <info>  [1734193599.0468] manager: startup complete
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online.
░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-wait-online.service has finished successfully.
░░ 
░░ The job identifier is 204.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)...
░░ Subject: A start job for unit cloud-init.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has begun execution.
░░ 
░░ The job identifier is 274.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.11.160.238
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.18.100.10
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.2.32.37
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Added source 10.2.32.38
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Cloud-init v. 24.1.4-21.el10 running 'init' at Sat, 14 Dec 2024 16:26:39 +0000. Up 15.52 seconds.
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Device |  Up  |           Address           |      Mask     | Scope  |     Hw-Address    |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |  eth0  | True |         10.31.43.117        | 255.255.252.0 | global | 0e:03:6a:4a:4d:55 |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |  eth0  | True | fe80::c03:6aff:fe4a:4d55/64 |       .       |  link  | 0e:03:6a:4a:4d:55 |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   lo   | True |          127.0.0.1          |   255.0.0.0   |  host  |         .         |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   lo   | True |           ::1/128           |       .       |  host  |         .         |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Route | Destination |  Gateway   |    Genmask    | Interface | Flags |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   0   |   0.0.0.0   | 10.31.40.1 |    0.0.0.0    |    eth0   |   UG  |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   1   |  10.31.40.0 |  0.0.0.0   | 255.255.252.0 |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   0   |  fe80::/64  |    ::   |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: |   2   |  multicast  |    ::   |    eth0   |   U   |
Dec 14 11:26:39 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: ci-info: +-------+-------------+---------+-----------+-------+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private rsa key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:4kPeOc6dyUInlbtyxVsYnSTAiNqxtK7A/xVLG9s/emE root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +---[RSA 3072]----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |       . o..     |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |      + . . . .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |     + +   . + . |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |    . +   o . o  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | .   .o S. o o   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  o  +.+oOo E .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   o .+.Oo.+ +   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |    o  =o+o++    |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |     .. o+*o..   |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private ecdsa key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:RdoWwMOSgw51mBD28FuLabo0FGe7XvtI9kDaq60uA5s root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +---[ECDSA 256]---+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  =+.+.+..o      |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | ..++.+ ++ .     |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  .o= .o..+      |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   +.* . o       |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  . * o S        |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |.. o =           |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | ++ o *          |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |E.o+ = *         |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |  .+=o=.o        |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Generating public/private ed25519 key pair.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key fingerprint is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: SHA256:v1uLzZ9r22pXF2QsO+gHOGERom4ErsGOiGgKtl7LE5E root@ip-10-31-43-117.us-east-1.aws.redhat.com
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: The key's randomart image is:
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +--[ED25519 256]--+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |   .   . oo   .  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |. . . . .o   . + |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | o ..o  . o . =  |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |* oEo    o o o . |
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |*=  .o  S o . . .|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |= ...    . . .  o|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |.. ..     . o   o|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: |. o..      * .oo.|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: | . o.     +.++*=.|
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[790]: +----[SHA256]-----+
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler).
░░ Subject: A start job for unit cloud-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.service has finished successfully.
░░ 
░░ The job identifier is 274.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability.
░░ Subject: A start job for unit cloud-config.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.target has finished successfully.
░░ 
░░ The job identifier is 277.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online.
░░ Subject: A start job for unit network-online.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit network-online.target has finished successfully.
░░ 
░░ The job identifier is 203.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config...
░░ Subject: A start job for unit cloud-config.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has begun execution.
░░ 
░░ The job identifier is 276.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming...
░░ Subject: A start job for unit kdump.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has begun execution.
░░ 
░░ The job identifier is 256.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness....
░░ Subject: A start job for unit restraintd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has begun execution.
░░ 
░░ The job identifier is 239.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart...
░░ Subject: A start job for unit rpc-statd-notify.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has begun execution.
░░ 
░░ The job identifier is 242.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 261.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sm-notify[872]: Version 2.7.1 starting
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart.
░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit rpc-statd-notify.service has finished successfully.
░░ 
░░ The job identifier is 242.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com (sshd)[873]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness..
░░ Subject: A start job for unit restraintd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit restraintd.service has finished successfully.
░░ 
░░ The job identifier is 239.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Server listening on 0.0.0.0 port 22.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Server listening on :: port 22.
Dec 14 11:26:40 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 261.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[916]: Cloud-init v. 24.1.4-21.el10 running 'modules:config' at Sat, 14 Dec 2024 16:26:41 +0000. Up 17.21 seconds.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[873]: Received signal 15; terminating.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon...
░░ Subject: A stop job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit sshd.service has successfully entered the 'dead' state.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon.
░░ Subject: A stop job for unit sshd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd.service has finished.
░░ 
░░ The job identifier is 507 and the job result is done.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target.
░░ Subject: A stop job for unit sshd-keygen.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has finished.
░░ 
░░ The job identifier is 591 and the job result is done.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target...
░░ Subject: A stop job for unit sshd-keygen.target has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A stop job for unit sshd-keygen.target has begun execution.
░░ 
░░ The job identifier is 591.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration).
░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit ssh-host-keys-migration.service has finished successfully.
░░ 
░░ The job identifier is 590.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░ 
░░ The job identifier is 586.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░ 
░░ The job identifier is 588.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░ 
░░ The job identifier is 589.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd-keygen.target has finished successfully.
░░ 
░░ The job identifier is 591.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has begun execution.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com (sshd)[920]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[920]: Server listening on 0.0.0.0 port 22.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com sshd[920]: Server listening on :: port 22.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit sshd.service has finished successfully.
░░ 
░░ The job identifier is 507.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com restraintd[877]: Listening on http://localhost:8081
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config.
░░ Subject: A start job for unit cloud-config.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-config.service has finished successfully.
░░ 
░░ The job identifier is 276.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts...
░░ Subject: A start job for unit cloud-final.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has begun execution.
░░ 
░░ The job identifier is 278.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions...
░░ Subject: A start job for unit systemd-user-sessions.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has begun execution.
░░ 
░░ The job identifier is 240.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions.
░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-user-sessions.service has finished successfully.
░░ 
░░ The job identifier is 240.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler.
░░ Subject: A start job for unit crond.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit crond.service has finished successfully.
░░ 
░░ The job identifier is 255.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1.
░░ Subject: A start job for unit getty@tty1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty@tty1.service has finished successfully.
░░ 
░░ The job identifier is 227.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0.
░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit serial-getty@ttyS0.service has finished successfully.
░░ 
░░ The job identifier is 231.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts.
░░ Subject: A start job for unit getty.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit getty.target has finished successfully.
░░ 
░░ The job identifier is 226.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System.
░░ Subject: A start job for unit multi-user.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit multi-user.target has finished successfully.
░░ 
░░ The job identifier is 121.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) STARTUP (1.7.0)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP...
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution.
░░ 
░░ The job identifier is 257.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (Syslog will be used instead of sendmail.)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 98% if used.)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com crond[926]: (CRON) INFO (running with inotify support)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP.
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully.
░░ 
░░ The job identifier is 257.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Detected change(s) in the following file(s):  /etc/fstab
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1020]: Cloud-init v. 24.1.4-21.el10 running 'modules:final' at Sat, 14 Dec 2024 16:26:41 +0000. Up 17.71 seconds.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1076]: #############################################################
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1077]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1079]: 256 SHA256:RdoWwMOSgw51mBD28FuLabo0FGe7XvtI9kDaq60uA5s root@ip-10-31-43-117.us-east-1.aws.redhat.com (ECDSA)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1085]: 256 SHA256:v1uLzZ9r22pXF2QsO+gHOGERom4ErsGOiGgKtl7LE5E root@ip-10-31-43-117.us-east-1.aws.redhat.com (ED25519)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1090]: 3072 SHA256:4kPeOc6dyUInlbtyxVsYnSTAiNqxtK7A/xVLG9s/emE root@ip-10-31-43-117.us-east-1.aws.redhat.com (RSA)
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1092]: -----END SSH HOST KEY FINGERPRINTS-----
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1095]: #############################################################
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com cloud-init[1020]: Cloud-init v. 24.1.4-21.el10 finished at Sat, 14 Dec 2024 16:26:41 +0000. Datasource DataSourceEc2Local.  Up 17.88 seconds
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts.
░░ Subject: A start job for unit cloud-final.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-final.service has finished successfully.
░░ 
░░ The job identifier is 278.
Dec 14 11:26:41 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target.
░░ Subject: A start job for unit cloud-init.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit cloud-init.target has finished successfully.
░░ 
░░ The job identifier is 273.
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 0 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 0 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 48 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 48 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 49 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 49 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 50 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 50 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 51 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 51 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 52 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 52 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 53 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 53 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 54 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 54 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 55 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 55 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 56 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 56 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 57 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 57 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 58 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 58 affinity is now unmanaged
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: Cannot change IRQ 59 affinity: Permission denied
Dec 14 11:26:43 ip-10-31-43-117.us-east-1.aws.redhat.com irqbalance[649]: IRQ 59 affinity is now unmanaged
Dec 14 11:26:44 ip-10-31-43-117.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated.
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Selected source 10.2.32.38
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Rebuilding /boot/initramfs-6.12.0-31.el10.x86_64kdump.img
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1384]: dracut-103-1.el10
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1387]: Executing: /usr/bin/dracut --list-modules
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1458]: dracut-103-1.el10
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics  --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/f3bb1e80-fac3-4b5e-93f6-d763469176c6 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.12.0-31.el10.x86_64kdump.img 6.12.0-31.el10.x86_64
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Dec 14 11:26:45 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'ifcfg' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'plymouth' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'resume' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'earlykdump' will not be installed, because it's in the list to be omitted!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'busybox' will not be installed, because command 'busybox' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmanctl' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'multipath' will not be installed, because command 'multipath' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'nvmf' will not be installed, because command 'nvme' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found!
Dec 14 11:26:46 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fips ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fips-crypto-policies ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-ask-password ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-initrd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-journald ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-modules-load ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-sysctl ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-sysusers ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-tmpfiles ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: systemd-udevd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: rngd ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: i18n ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: drm ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: prefixdevname ***
Dec 14 11:26:47 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kernel-modules ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kernel-modules-extra ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: configuration source "/run/depmod.d" does not exist
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: configuration source "/lib/depmod.d" does not exist
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf"
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: pcmcia ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Skipping udev rule: 60-pcmcia.rules
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fstab-sys ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: hwdb ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: rootfs-block ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: squash-squashfs ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: terminfo ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: udev-rules ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: dracut-systemd ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: usrmount ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: base ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: fs-lib ***
Dec 14 11:26:48 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: kdumpbase ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: memstrack ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: microcode_ctl-fw_dir_override ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:   microcode_ctl module: mangling fw_dir
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware"
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel"...
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:       microcode_ctl: intel: caveats check for kernel version "6.12.0-31.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: processing data directory  "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"...
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: configuration "intel-06-4f-01" is ignored
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]:     microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware"
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: shutdown ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including module: squash-lib ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Including modules done ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Installing kernel module dependencies ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Installing kernel module dependencies done ***
Dec 14 11:26:49 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Resolving executable dependencies ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Resolving executable dependencies done ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Hardlinking files ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Mode:                     real
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Method:                   sha256
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Files:                    537
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Linked:                   25 files
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Compared:                 0 xattrs
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Compared:                 48 files
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Saved:                    13.58 MiB
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Duration:                 0.163206 seconds
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Hardlinking files done ***
Dec 14 11:26:50 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Generating early-microcode cpio image ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Constructing GenuineIntel.bin ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Constructing GenuineIntel.bin ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Store current command line parameters ***
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: Stored kernel commandline:
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: No dracut internal kernel commandline stored in the initramfs
Dec 14 11:26:51 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Squashing the files inside the initramfs ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Squashing the files inside the initramfs done ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Creating image file '/boot/initramfs-6.12.0-31.el10.x86_64kdump.img' ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com dracut[1461]: *** Creating initramfs image file '/boot/initramfs-6.12.0-31.el10.x86_64kdump.img' done ***
Dec 14 11:26:59 ip-10-31-43-117.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: kexec: loaded kdump kernel
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Starting kdump: [OK]
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com kdumpctl[879]: kdump: Notice: No vmcore creation test performed!
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming.
░░ Subject: A start job for unit kdump.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit kdump.service has finished successfully.
░░ 
░░ The job identifier is 256.
Dec 14 11:27:00 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.005s (kernel) + 3.724s (initrd) + 31.760s (userspace) = 36.490s.
░░ Subject: System start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ All system services necessary queued for starting at boot have been
░░ started. Note that this does not mean that the machine is now idle as services
░░ might still be busy with completing start-up.
░░ 
░░ Kernel start-up required 1005350 microseconds.
░░ 
░░ Initrd start-up required 3724049 microseconds.
░░ 
░░ Userspace start-up required 31760904 microseconds.
Dec 14 11:27:08 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Dec 14 11:27:51 ip-10-31-43-117.us-east-1.aws.redhat.com chronyd[664]: Selected source 216.66.48.42 (2.centos.pool.ntp.org)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: Accepted publickey for root from 10.30.34.106 port 52592 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4322) opened.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-0.slice has finished successfully.
░░ 
░░ The job identifier is 602.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░ 
░░ The job identifier is 601.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 1 of user root.
░░ Subject: A new session 1 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 1 has been created for the user root.
░░ 
░░ The leading process of the session is 4322.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░ 
░░ The job identifier is 601.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has begun execution.
░░ 
░░ The job identifier is 681.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 2 of user root.
░░ Subject: A new session 2 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 2 has been created for the user root.
░░ 
░░ The leading process of the session is 4327.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com (systemd)[4327]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Queued start job for default target default.target.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 5.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 10.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 11.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 12.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 9.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 4.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has begun execution.
░░ 
░░ The job identifier is 8.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 8.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 4.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 3.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 2.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit UNIT has finished successfully.
░░ 
░░ The job identifier is 1.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[4327]: Startup finished in 127ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░ 
░░ Startup of the manager took 127888 microseconds.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit user@0.service has finished successfully.
░░ 
░░ The job identifier is 681.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root.
░░ Subject: A start job for unit session-1.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-1.scope has finished successfully.
░░ 
░░ The job identifier is 762.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4338]: Received disconnect from 10.30.34.106 port 52592:11: disconnected by user
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4338]: Disconnected from user root 10.30.34.106 port 52592
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4322) opened.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4322]: pam_unix(sshd:session): session closed for user root
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-1.scope has successfully entered the 'dead' state.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Session 1 logged out. Waiting for processes to exit.
Dec 14 11:29:16 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Removed session 1.
░░ Subject: Session 1 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 1 has been terminated.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: Accepted publickey for root from 10.31.8.152 port 43942 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: Accepted publickey for root from 10.31.8.152 port 43954 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4376) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4377) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 3 of user root.
░░ Subject: A new session 3 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 3 has been created for the user root.
░░ 
░░ The leading process of the session is 4376.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root.
░░ Subject: A start job for unit session-3.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-3.scope has finished successfully.
░░ 
░░ The job identifier is 844.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: New session 4 of user root.
░░ Subject: A new session 4 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 4 has been created for the user root.
░░ 
░░ The leading process of the session is 4377.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root.
░░ Subject: A start job for unit session-4.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-4.scope has finished successfully.
░░ 
░░ The job identifier is 926.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4376]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4383]: Received disconnect from 10.31.8.152 port 43954:11: disconnected by user
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4383]: Disconnected from user root 10.31.8.152 port 43954
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4377) opened.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com sshd-session[4377]: pam_unix(sshd:session): session closed for user root
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit session-4.scope has successfully entered the 'dead' state.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Session 4 logged out. Waiting for processes to exit.
Dec 14 11:29:23 ip-10-31-43-117.us-east-1.aws.redhat.com systemd-logind[653]: Removed session 4.
░░ Subject: Session 4 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A session with the ID 4 has been terminated.
Dec 14 11:29:28 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░ 
░░ The job identifier is 1008.
Dec 14 11:29:28 ip-10-31-43-117.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░ 
░░ The job identifier is 1008.
Dec 14 11:29:28 managed-node1 systemd-hostnamed[5857]: Hostname set to <managed-node1> (static)
Dec 14 11:29:28 managed-node1 NetworkManager[703]: <info>  [1734193768.6492] hostname: static hostname changed from "ip-10-31-43-117.us-east-1.aws.redhat.com" to "managed-node1"
Dec 14 11:29:28 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 1086.
Dec 14 11:29:28 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 1086.
Dec 14 11:29:38 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:29:58 managed-node1 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Dec 14 11:30:05 managed-node1 sshd-session[6523]: Accepted publickey for root from 10.31.13.174 port 42640 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Dec 14 11:30:05 managed-node1 sshd-session[6523]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-6523) opened.
Dec 14 11:30:05 managed-node1 systemd-logind[653]: New session 5 of user root.
░░ Subject: A new session 5 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░ 
░░ A new session with the ID 5 has been created for the user root.
░░ 
░░ The leading process of the session is 6523.
Dec 14 11:30:05 managed-node1 systemd[1]: Started session-5.scope - Session 5 of User root.
░░ Subject: A start job for unit session-5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit session-5.scope has finished successfully.
░░ 
░░ The job identifier is 1165.
Dec 14 11:30:05 managed-node1 sshd-session[6523]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:30:06 managed-node1 python3.12[6679]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Dec 14 11:30:08 managed-node1 python3.12[6839]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:30:08 managed-node1 python3.12[6970]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:30:10 managed-node1 sudo[7232]:     root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qkiobsjfqecahznjrwohrybwqhyobeje ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1734193809.9793763-6984-204031990791383/AnsiballZ_dnf.py'
Dec 14 11:30:10 managed-node1 sudo[7232]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-7232) opened.
Dec 14 11:30:10 managed-node1 sudo[7232]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0)
Dec 14 11:30:10 managed-node1 python3.12[7235]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:30:27 managed-node1 kernel: SELinux:  Converting 384 SID table entries...
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:27 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  Converting 385 SID table entries...
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:34 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  Converting 385 SID table entries...
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:42 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:30:43 managed-node1 setsebool[7309]: The virt_use_nfs policy boolean was changed to 1 by root
Dec 14 11:30:43 managed-node1 setsebool[7309]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Dec 14 11:30:52 managed-node1 kernel: SELinux:  Converting 388 SID table entries...
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:30:52 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  Converting 388 SID table entries...
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability network_peer_controls=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability open_perms=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability extended_socket_class=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability always_check_network=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability cgroup_seclabel=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability ioctl_skip_cloexec=0
Dec 14 11:31:00 managed-node1 kernel: SELinux:  policy capability userspace_initial_context=0
Dec 14 11:31:17 managed-node1 systemd[1]: Started run-rdcb31fbbad404dfd86db5482f938d0b1.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has finished successfully.
░░ 
░░ The job identifier is 1247.
Dec 14 11:31:17 managed-node1 systemd[1]: Reload requested from client PID 8034 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:17 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:17 managed-node1 systemd[1]: Reloading finished in 190 ms.
Dec 14 11:31:17 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1325.
Dec 14 11:31:17 managed-node1 systemd[1]: Queuing reload/restart jobs for marked units…
Dec 14 11:31:18 managed-node1 sudo[7232]: pam_unix(sudo:session): session closed for user root
Dec 14 11:31:18 managed-node1 python3.12[8229]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:19 managed-node1 python3.12[8367]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Dec 14 11:31:19 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:19 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1325.
Dec 14 11:31:19 managed-node1 systemd[1]: run-rdcb31fbbad404dfd86db5482f938d0b1.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-rdcb31fbbad404dfd86db5482f938d0b1.service has successfully entered the 'dead' state.
Dec 14 11:31:20 managed-node1 python3.12[8503]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:22 managed-node1 python3.12[8636]: ansible-tempfile Invoked with prefix=lsr_ suffix=_podman state=directory path=None
Dec 14 11:31:22 managed-node1 python3.12[8767]: ansible-file Invoked with path=/tmp/lsr_6ehua9m0_podman/auth state=directory mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:23 managed-node1 python3.12[8898]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:24 managed-node1 python3.12[9029]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:26 managed-node1 python3.12[9165]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 dbus-broker-launch[637]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░ 
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░ 
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░ 
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 14 11:31:27 managed-node1 systemd[1]: Reload requested from client PID 9173 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:27 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading finished in 183 ms.
Dec 14 11:31:28 managed-node1 systemd[1]: Started run-r3d4cf19d1fc24d23b770a4063f70f37f.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has finished successfully.
░░ 
░░ The job identifier is 1407.
Dec 14 11:31:28 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1485.
Dec 14 11:31:28 managed-node1 systemd[1]: Reload requested from client PID 9234 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:28 managed-node1 systemd[1]: Reloading finished in 291 ms.
Dec 14 11:31:28 managed-node1 systemd[1]: Queuing reload/restart jobs for marked units…
Dec 14 11:31:29 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:29 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1485.
Dec 14 11:31:29 managed-node1 systemd[1]: run-r3d4cf19d1fc24d23b770a4063f70f37f.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-r3d4cf19d1fc24d23b770a4063f70f37f.service has successfully entered the 'dead' state.
Dec 14 11:31:29 managed-node1 python3.12[9425]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:29 managed-node1 python3.12[9556]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:30 managed-node1 python3.12[9687]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 14 11:31:30 managed-node1 systemd[1]: Reload requested from client PID 9690 ('systemctl') (unit session-5.scope)...
Dec 14 11:31:30 managed-node1 systemd[1]: Reloading...
Dec 14 11:31:30 managed-node1 systemd[1]: Reloading finished in 186 ms.
Dec 14 11:31:30 managed-node1 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab...
░░ Subject: A start job for unit fstrim.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.service has begun execution.
░░ 
░░ The job identifier is 1563.
Dec 14 11:31:31 managed-node1 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment...
░░ Subject: A start job for unit certmonger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has begun execution.
░░ 
░░ The job identifier is 1641.
Dec 14 11:31:31 managed-node1 (rtmonger)[9745]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS
Dec 14 11:31:31 managed-node1 systemd[1]: fstrim.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit fstrim.service has successfully entered the 'dead' state.
Dec 14 11:31:31 managed-node1 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab.
░░ Subject: A start job for unit fstrim.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit fstrim.service has finished successfully.
░░ 
░░ The job identifier is 1563.
Dec 14 11:31:31 managed-node1 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment.
░░ Subject: A start job for unit certmonger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit certmonger.service has finished successfully.
░░ 
░░ The job identifier is 1641.
Dec 14 11:31:31 managed-node1 python3.12[9904]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=podman_registry dns=['localhost', '127.0.0.1'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
                                                # Ansible managed
                                                #
                                                # system_role:certificate
                                                 provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:31 managed-node1 certmonger[9745]: 2024-12-14 11:31:31 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 certmonger[9919]: Certificate in file "/etc/pki/tls/certs/podman_registry.crt" issued by CA and saved.
Dec 14 11:31:32 managed-node1 certmonger[9745]: 2024-12-14 11:31:32 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:32 managed-node1 python3.12[10050]: ansible-slurp Invoked with path=/etc/pki/tls/certs/podman_registry.crt src=/etc/pki/tls/certs/podman_registry.crt
Dec 14 11:31:33 managed-node1 python3.12[10181]: ansible-slurp Invoked with path=/etc/pki/tls/private/podman_registry.key src=/etc/pki/tls/private/podman_registry.key
Dec 14 11:31:33 managed-node1 python3.12[10312]: ansible-slurp Invoked with path=/etc/pki/tls/certs/podman_registry.crt src=/etc/pki/tls/certs/podman_registry.crt
Dec 14 11:31:34 managed-node1 python3.12[10443]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/podman_registry.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:34 managed-node1 certmonger[9745]: 2024-12-14 11:31:34 [9745] Wrote to /var/lib/certmonger/requests/20241214163131
Dec 14 11:31:34 managed-node1 python3.12[10575]: ansible-file Invoked with path=/etc/pki/tls/certs/podman_registry.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:34 managed-node1 python3.12[10706]: ansible-file Invoked with path=/etc/pki/tls/private/podman_registry.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:35 managed-node1 python3.12[10837]: ansible-file Invoked with path=/etc/pki/tls/certs/podman_registry.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:35 managed-node1 python3.12[10968]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:36 managed-node1 python3.12[11073]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/registry_cert.crt mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193895.4098308-8404-275762393983029/.source.crt _original_basename=.fdte2xv6 follow=False checksum=a56753cb72985d5015d277aab9534d583f3099c2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:36 managed-node1 python3.12[11204]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/registry_key.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:36 managed-node1 python3.12[11309]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/registry_key.pem mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193896.2446203-8452-271283031401177/.source.pem _original_basename=.b248p56a follow=False checksum=3c4bd2383044d864f778448dd3788c2bdf7f63a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:37 managed-node1 python3.12[11440]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_6ehua9m0_podman/auth/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:31:37 managed-node1 python3.12[11545]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_6ehua9m0_podman/auth/ca.crt mode=0600 src=/root/.ansible/tmp/ansible-tmp-1734193896.9596615-8488-86842030413265/.source.crt _original_basename=.5xr0fb34 follow=False checksum=a56753cb72985d5015d277aab9534d583f3099c2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:31:38 managed-node1 python3.12[11676]: ansible-ansible.legacy.dnf Invoked with name=['httpd-tools', 'skopeo'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 14 11:31:42 managed-node1 systemd[1]: Started run-ra2bab39c1da445c09f883f3d116af994.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-ra2bab39c1da445c09f883f3d116af994.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit run-ra2bab39c1da445c09f883f3d116af994.service has finished successfully.
░░ 
░░ The job identifier is 1720.
Dec 14 11:31:42 managed-node1 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has begun execution.
░░ 
░░ The job identifier is 1798.
Dec 14 11:31:42 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 14 11:31:42 managed-node1 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░ 
░░ The job identifier is 1798.
Dec 14 11:31:42 managed-node1 systemd[1]: run-ra2bab39c1da445c09f883f3d116af994.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit run-ra2bab39c1da445c09f883f3d116af994.service has successfully entered the 'dead' state.
Dec 14 11:31:43 managed-node1 python3.12[12190]: ansible-ansible.legacy.command Invoked with _raw_params=podman run -d -p 127.0.0.1:5000:5000 --name podman_registry -v /tmp/lsr_6ehua9m0_podman/auth:/auth:Z -e REGISTRY_AUTH=htpasswd -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd -e REGISTRY_HTTP_TLS_CERTIFICATE=/auth/registry_cert.crt -e REGISTRY_HTTP_TLS_KEY=/auth/registry_key.pem quay.io/libpod/registry:2.8.2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
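For readability, the registry start command recorded in the entry above is the following invocation wrapped onto multiple lines (same flags as logged; nothing added):

    podman run -d -p 127.0.0.1:5000:5000 --name podman_registry \
        -v /tmp/lsr_6ehua9m0_podman/auth:/auth:Z \
        -e REGISTRY_AUTH=htpasswd \
        -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" \
        -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd \
        -e REGISTRY_HTTP_TLS_CERTIFICATE=/auth/registry_cert.crt \
        -e REGISTRY_HTTP_TLS_KEY=/auth/registry_key.pem \
        quay.io/libpod/registry:2.8.2

The :Z volume option relabels the mounted auth directory for SELinux so the containerized registry can read the htpasswd file and the TLS certificate and key.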
Dec 14 11:31:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-compat1989105179-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-compat1989105179-merged.mount has successfully entered the 'dead' state.
Dec 14 11:31:44 managed-node1 kernel: evm: overlay not supported
Dec 14 11:31:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2016384165-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2016384165-merged.mount has successfully entered the 'dead' state.
Dec 14 11:31:44 managed-node1 podman[12191]: 2024-12-14 11:31:44.032711562 -0500 EST m=+0.082834242 system refresh
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.721997956 -0500 EST m=+1.772120507 volume create 174bec1e6ec18bfefedd3e5fc8a18b56102b9fb6012f8e02b781fd81b243eef3
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.701531903 -0500 EST m=+1.751654658 image pull 0030ba3d620c647159c935ee778991c68ef3e51a274703753b0bc530104ef5e5 quay.io/libpod/registry:2.8.2
Dec 14 11:31:45 managed-node1 podman[12191]: 2024-12-14 11:31:45.732182371 -0500 EST m=+1.782304936 container create 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:45 managed-node1 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7779] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Dec 14 11:31:45 managed-node1 (udev-worker)[12280]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered blocking state
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered disabled state
Dec 14 11:31:45 managed-node1 kernel: veth0: entered allmulticast mode
Dec 14 11:31:45 managed-node1 kernel: veth0: entered promiscuous mode
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered blocking state
Dec 14 11:31:45 managed-node1 kernel: podman0: port 1(veth0) entered forwarding state
Dec 14 11:31:45 managed-node1 (udev-worker)[12198]: Network interface NamePolicy= disabled on kernel command line.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7970] device (veth0): carrier: link connected
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7973] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.7998] device (podman0): carrier: link connected
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8066] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8071] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8078] device (podman0): Activation: starting connection 'podman0' (08e2f206-5ac2-4e2f-8306-ac90b232dcf4)
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8080] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8083] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8086] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8089] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░ 
░░ The job identifier is 1877.
Dec 14 11:31:45 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░ 
░░ The job identifier is 1877.
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8659] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8662] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Dec 14 11:31:45 managed-node1 NetworkManager[703]: <info>  [1734193905.8667] device (podman0): Activation: successful, device activated.
Dec 14 11:31:46 managed-node1 systemd[1]: Created slice machine.slice - Slice /machine.
░░ Subject: A start job for unit machine.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit machine.slice has finished successfully.
░░ 
░░ The job identifier is 1957.
Dec 14 11:31:46 managed-node1 systemd[1]: Started libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope.
░░ Subject: A start job for unit libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-conmon-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully.
░░ 
░░ The job identifier is 1956.
Dec 14 11:31:46 managed-node1 systemd[1]: Started libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope - libcrun container.
░░ Subject: A start job for unit libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ A start job for unit libpod-013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a.scope has finished successfully.
░░ 
░░ The job identifier is 1962.
Dec 14 11:31:46 managed-node1 podman[12191]: 2024-12-14 11:31:46.056393753 -0500 EST m=+2.106516450 container init 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:46 managed-node1 podman[12191]: 2024-12-14 11:31:46.060003186 -0500 EST m=+2.110125831 container start 013623ab41c550d739cb44f590fc1fde1e9557fa6fbbb3a443ef1d0bf0f6f57a (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Dec 14 11:31:46 managed-node1 python3.12[12486]: ansible-wait_for Invoked with port=5000 host=127.0.0.1 timeout=300 connect_timeout=5 delay=0 active_connection_states=['ESTABLISHED', 'FIN_WAIT1', 'FIN_WAIT2', 'SYN_RECV', 'SYN_SENT', 'TIME_WAIT'] state=started sleep=1 path=None search_regex=None exclude_hosts=None msg=None
Dec 14 11:31:47 managed-node1 python3.12[12617]: ansible-ansible.legacy.command Invoked with _raw_params=podman logs podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:47 managed-node1 python3.12[12755]: ansible-ansible.legacy.command Invoked with _raw_params=podman pull quay.io/libpod/testimage:20210610; podman push --authfile="/tmp/lsr_6ehua9m0_podman/auth/auth.json" --cert-dir="/tmp/lsr_6ehua9m0_podman/auth" quay.io/libpod/testimage:20210610 docker://localhost:5000/libpod/testimage:20210610 _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
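Wrapped form of the pull-and-push command from the entry above; the push authenticates against the local registry with the generated auth.json and trusts its certificate via --cert-dir (both paths exactly as logged):

    podman pull quay.io/libpod/testimage:20210610
    podman push --authfile="/tmp/lsr_6ehua9m0_podman/auth/auth.json" \
        --cert-dir="/tmp/lsr_6ehua9m0_podman/auth" \
        quay.io/libpod/testimage:20210610 \
        docker://localhost:5000/libpod/testimage:20210610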
Dec 14 11:31:49 managed-node1 podman[12757]: 2024-12-14 11:31:49.445577203 -0500 EST m=+1.809759385 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Dec 14 11:31:50 managed-node1 podman[12756]: 2024-12-14 11:31:49.47108663 -0500 EST m=+0.016538804 image push 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f docker://localhost:5000/libpod/testimage:20210610
Dec 14 11:31:50 managed-node1 python3.12[12918]: ansible-ansible.legacy.command Invoked with _raw_params=skopeo inspect --authfile="/tmp/lsr_6ehua9m0_podman/auth/auth.json" --cert-dir="/tmp/lsr_6ehua9m0_podman/auth" docker://localhost:5000/libpod/testimage:20210610 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:53 managed-node1 python3.12[13187]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:31:54 managed-node1 python3.12[13324]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:55 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ 
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 14 11:31:57 managed-node1 python3.12[13458]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:31:58 managed-node1 python3.12[13591]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
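The systemd-escape call above derives the instance name of the podman-kube@.service template from the kube file path; as the stop request at 11:32:38 below shows, it expands to:

    systemd-escape --template podman-kube@.service \
        /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service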
Dec 14 11:32:00 managed-node1 podman[13732]: 2024-12-14 11:32:00.132917471 -0500 EST m=+0.121086648 image pull-error  localhost:5000/libpod/testimage:20210610 initializing source docker://localhost:5000/libpod/testimage:20210610: reading manifest 20210610 in localhost:5000/libpod/testimage: authentication required
Dec 14 11:32:02 managed-node1 python3.12[14001]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:04 managed-node1 python3.12[14138]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:07 managed-node1 python3.12[14271]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:08 managed-node1 python3.12[14404]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:09 managed-node1 podman[14543]: 2024-12-14 11:32:09.204081254 -0500 EST m=+0.145458147 image pull-error  localhost:5000/libpod/testimage:20210610 initializing source docker://localhost:5000/libpod/testimage:20210610: pinging container registry localhost:5000: Get "https://localhost:5000/v2/": tls: failed to verify certificate: x509: certificate signed by unknown authority
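The two image pull-error entries above are the expected negative results for this test: the pull at 11:32:00 is rejected because no credentials are presented to the htpasswd-protected registry ("authentication required"), and the pull at 11:32:09 fails TLS verification because the self-signed registry certificate is not yet trusted. A minimal manual equivalent (hypothetical, outside the role) would be:

    podman pull localhost:5000/libpod/testimage:20210610

which only succeeds once credentials and the registry CA certificate are configured, as happens at 11:32:17-11:32:22 below.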
Dec 14 11:32:12 managed-node1 python3.12[14812]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:14 managed-node1 python3.12[14949]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:16 managed-node1 python3.12[15082]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:17 managed-node1 python3.12[15215]: ansible-file Invoked with path=/etc/containers/certs.d/localhost:5000 state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
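The directory created above follows the containers-certs.d(5) layout: podman and skopeo look up per-registry TLS material under /etc/containers/certs.d/<host:port>/. Presumably the role then places the registry CA certificate there (the copy itself is not visible in this excerpt), which is what lets the pull at 11:32:22 below verify the self-signed certificate:

    /etc/containers/certs.d/localhost:5000/ca.crt    # CA certificate trusted for this registry only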
Dec 14 11:32:20 managed-node1 python3.12[15582]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:21 managed-node1 python3.12[15715]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:22 managed-node1 podman[15856]: 2024-12-14 11:32:22.830406841 -0500 EST m=+0.198941135 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f localhost:5000/libpod/testimage:20210610
Dec 14 11:32:23 managed-node1 python3.12[16001]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:23 managed-node1 python3.12[16132]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:24 managed-node1 python3.12[16263]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 14 11:32:24 managed-node1 python3.12[16368]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734193943.8234994-10632-256821299067559/.source.yml _original_basename=.h1hpyflq follow=False checksum=fb0097683a2e5c8909a8037d64ddc1b350aed0be backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:25 managed-node1 python3.12[16499]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 14 11:32:25 managed-node1 python3.12[16643]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:28 managed-node1 python3.12[16906]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:29 managed-node1 python3.12[17043]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:32 managed-node1 python3.12[17176]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:34 managed-node1 python3.12[17309]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:36 managed-node1 python3.12[17442]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:37 managed-node1 python3.12[17575]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:38 managed-node1 python3.12[17707]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Dec 14 11:32:38 managed-node1 python3.12[17840]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:39 managed-node1 python3.12[17973]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 14 11:32:39 managed-node1 python3.12[17973]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
Dec 14 11:32:40 managed-node1 python3.12[18117]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:41 managed-node1 python3.12[18248]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:43 managed-node1 python3.12[18381]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Dec 14 11:32:43 managed-node1 python3.12[18513]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:44 managed-node1 python3.12[18644]: ansible-file Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:46 managed-node1 python3.12[18775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:49 managed-node1 python3.12[19170]: ansible-file Invoked with path=/root/.config/containers state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 14 11:32:50 managed-node1 python3.12[19301]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 14 11:32:55 managed-node1 python3.12[20220]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 14 11:32:56 managed-node1 python3.12[20357]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None
Dec 14 11:32:57 managed-node1 python3.12[20489]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
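
The two systemd-escape calls in the journal above show how the role derives the unit name for a kube file: the YAML path is passed through systemd-escape with the podman-kube@.service template, which turns "/" into "-" and escapes a literal "-" as "\x2d", producing the instance name that the ansible-systemd call stops a few entries later. Run stand-alone with the path from this run, the command is:

    # Escape the kube file path into an instance of the templated unit.
    # Prints: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service
    systemd-escape --template podman-kube@.service \
        /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml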

TASK [Remove user] *************************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:249
Saturday 14 December 2024  11:32:57 -0500 (0:00:00.458)       0:02:53.263 ***** 
ok: [managed-node1] => {
    "changed": false,
    "name": "auth_test_user1",
    "state": "absent"
}

TASK [Remove homedir] **********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:254
Saturday 14 December 2024  11:32:58 -0500 (0:00:00.705)       0:02:53.968 ***** 
ok: [managed-node1] => {
    "changed": false,
    "path": "/home/auth_test_user1",
    "state": "absent"
}
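
The two tasks above drop the test account and its home directory; both report ok (changed: false) because earlier cleanup in this run had already removed them. A hypothetical shell equivalent (the test itself uses Ansible modules, not these commands) would be roughly:

    # Hypothetical manual equivalent of "Remove user" / "Remove homedir".
    # "|| true" because userdel exits non-zero when the user is already gone.
    userdel --remove auth_test_user1 || true
    rm -rf /home/auth_test_user1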

TASK [Remove local tmpdir] *****************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:272
Saturday 14 December 2024  11:32:58 -0500 (0:00:00.419)       0:02:54.388 ***** 
changed: [managed-node1 -> localhost] => {
    "changed": true,
    "path": "/tmp/lsr_sine48u0_podman",
    "state": "absent"
}

TASK [Clean up registry] *******************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:280
Saturday 14 December 2024  11:32:58 -0500 (0:00:00.325)       0:02:54.713 ***** 
included: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/cleanup_registry.yml for managed-node1

TASK [Get volume for cleanup] **************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/cleanup_registry.yml:3
Saturday 14 December 2024  11:32:58 -0500 (0:00:00.048)       0:02:54.762 ***** 
ok: [managed-node1] => {
    "changed": false,
    "cmd": [
        "podman",
        "inspect",
        "podman_registry",
        "--format",
        "{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}"
    ],
    "delta": "0:00:00.034791",
    "end": "2024-12-14 11:32:59.363902",
    "rc": 0,
    "start": "2024-12-14 11:32:59.329111"
}

STDOUT:

174bec1e6ec18bfefedd3e5fc8a18b56102b9fb6012f8e02b781fd81b243eef3


STDERR:

time="2024-12-14T11:32:59-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [Destroy registry container] **********************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/cleanup_registry.yml:11
Saturday 14 December 2024  11:32:59 -0500 (0:00:00.438)       0:02:55.200 ***** 
changed: [managed-node1] => {
    "changed": true,
    "cmd": [
        "podman",
        "rm",
        "-f",
        "podman_registry"
    ],
    "delta": "0:00:00.183413",
    "end": "2024-12-14 11:32:59.922205",
    "rc": 0,
    "start": "2024-12-14 11:32:59.738792"
}

STDOUT:

podman_registry


STDERR:

time="2024-12-14T11:32:59-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [Destroy volume] **********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/cleanup_registry.yml:15
Saturday 14 December 2024  11:32:59 -0500 (0:00:00.555)       0:02:55.756 ***** 
changed: [managed-node1] => {
    "changed": true,
    "cmd": [
        "podman",
        "volume",
        "rm",
        "174bec1e6ec18bfefedd3e5fc8a18b56102b9fb6012f8e02b781fd81b243eef3"
    ],
    "delta": "0:00:00.036851",
    "end": "2024-12-14 11:33:00.363258",
    "rc": 0,
    "start": "2024-12-14 11:33:00.326407"
}

STDOUT:

174bec1e6ec18bfefedd3e5fc8a18b56102b9fb6012f8e02b781fd81b243eef3


STDERR:

time="2024-12-14T11:33:00-05:00" level=warning msg="Failed to decode the keys [\"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options\" \"storage.options.overlay.pull_options.enable_partial_images\" \"storage.options.overlay.pull_options.use_hard_links\" \"storage.options.overlay.pull_options.ostree_repos\" \"storage.options.overlay.pull_options.convert_images\"] from \"/usr/share/containers/storage.conf\""

TASK [Cleanup paths] ***********************************************************
task path: /tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/cleanup_registry.yml:19
Saturday 14 December 2024  11:33:00 -0500 (0:00:00.638)       0:02:56.395 ***** 
changed: [managed-node1] => (item=/tmp/lsr_6ehua9m0_podman) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/tmp/lsr_6ehua9m0_podman",
    "path": "/tmp/lsr_6ehua9m0_podman",
    "state": "absent"
}

PLAY RECAP *********************************************************************
managed-node1              : ok=261  changed=31   unreachable=0    failed=1    skipped=374  rescued=4    ignored=0   


TASKS RECAP ********************************************************************
Saturday 14 December 2024  11:33:01 -0500 (0:00:00.403)       0:02:56.799 ***** 
=============================================================================== 
fedora.linux_system_roles.podman : Ensure required packages are installed -- 68.37s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 
Ensure test packages ---------------------------------------------------- 4.75s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:64 
Push test images into local registry ------------------------------------ 3.29s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:127 
Gathering Facts --------------------------------------------------------- 3.18s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tests_auth_and_security.yml:9 
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.90s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 
Start registry ---------------------------------------------------------- 2.55s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:102 
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.31s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.38s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.35s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 
fedora.linux_system_roles.podman : Remove certs.d files ----------------- 1.22s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:75 
fedora.linux_system_roles.certificate : Remove files -------------------- 1.20s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.13s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 
fedora.linux_system_roles.certificate : Ensure certificate requests ----- 1.00s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.99s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.97s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 
fedora.linux_system_roles.podman : Ensure container images are present --- 0.93s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.89s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 
fedora.linux_system_roles.podman : Update containers/pods --------------- 0.87s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 
fedora.linux_system_roles.podman : Ensure certs.d files ----------------- 0.83s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_certs_d.yml:58 
Write cert for registry ------------------------------------------------- 0.83s
/tmp/collections-9ny/ansible_collections/fedora/linux_system_roles/tests/podman/tasks/setup_registry.yml:34