ansible-playbook [core 2.17.12]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-MIG
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.10 (main, May 9 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-5)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_bz1855544.yml **************************************************
2 plays in /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:5
Saturday 24 May 2025 06:23:49 -0400 (0:00:00.017) 0:00:00.017 **********
ok: [managed-node2] => {
    "ansible_facts": {
        "pcptest_pw": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/metrics-y7z/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Bug 1855544 - metrics role should automate the setup of Grafana datasources] ***

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:9
Saturday 24 May 2025 06:23:50 -0400 (0:00:00.015) 0:00:00.033 **********
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
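The interpreter-discovery warning above is informational: Ansible found /usr/bin/python3.9 on managed-node2 and keeps using it for this run. A minimal sketch of one way to make that choice explicit and silence the warning, assuming the test inventory for managed-node2 can carry host variables, is to pin the standard ansible_python_interpreter variable to the path the warning reports:

all:
  hosts:
    managed-node2:
      # Path taken from the discovery warning above; pinning it skips discovery
      # so the interpreter no longer depends on which Pythons get installed later.
      ansible_python_interpreter: /usr/bin/python3.9

With the interpreter pinned, subsequent runs against this host use the same Python regardless of later package installations.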
ok: [managed-node2] TASK [Stop test] *************************************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:17 Saturday 24 May 2025 06:23:52 -0400 (0:00:02.468) 0:00:02.501 ********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [Get initial state of services] ******************************************* task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Saturday 24 May 2025 06:23:52 -0400 (0:00:00.023) 0:00:02.524 ********** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", 
"status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { 
"name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:27 Saturday 24 May 2025 06:23:55 -0400 (0:00:02.641) 0:00:05.166 ********** included: fedora.linux_system_roles.metrics for managed-node2 TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.034) 0:00:05.201 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.018) 0:00:05.220 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.235 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.250 ********** skipping: [managed-node2] => { "changed": false, 
"false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.014) 0:00:05.265 ********** ok: [managed-node2] => { "ansible_facts": { "__metrics_domains": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.020) 0:00:05.286 ********** ok: [managed-node2] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.020) 0:00:05.307 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure Spark metrics] ************************************************* task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.322 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_spark | d(false) | bool or metrics_into_spark | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] ******************************************* task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:62 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.338 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:70 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.353 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] 
************************************************* task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:78 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.015) 0:00:05.369 ********** included: fedora.linux_system_roles.private_metrics_subrole_bpftrace for managed-node2 TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set platform/version specific variables] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:4 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.040) 0:00:05.410 ********** ok: [managed-node2] => (item=/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml) => { "ansible_facts": { "bpftrace_metrics_provider": "pcp" }, "ansible_included_var_files": [ "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" } ok: [managed-node2] => (item=/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" } skipping: [managed-node2] => (item=/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } ok: [managed-node2] => (item=/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Check if system is ostree] *** task path: 
/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:18 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.047) 0:00:05.457 ********** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set flag to indicate system is ostree] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:23 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.432) 0:00:05.890 ********** ok: [managed-node2] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace package names] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:27 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.024) 0:00:05.914 ********** ok: [managed-node2] => { "ansible_facts": { "__bpftrace_packages_extra": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace metrics package names] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:34 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.022) 0:00:05.937 ********** ok: [managed-node2] => { "ansible_facts": { "__bpftrace_packages_extra": [ "pcp-pmda-bpftrace", "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages] *** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41 Saturday 24 May 2025 06:23:55 -0400 (0:00:00.022) 0:00:05.959 ********** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "rc": 1, "results": [] } MSG: Failed to download metadata for repo 'highavailability': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried TASK [Handle test failure] ***************************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:47 Saturday 24 May 2025 06:24:00 -0400 (0:00:04.681) 0:00:10.640 ********** included: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node2 TASK [Collect logs] ************************************************************ task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Saturday 24 May 2025 06:24:00 -0400 (0:00:00.026) 0:00:10.667 ********** ok: [managed-node2] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.042179", "end": "2025-05-24 06:24:01.059532", "rc": 0, "start": "2025-05-24 06:24:01.017353" } STDOUT: May 24 06:19:49 localhost kernel: VFS: Disk quotas dquot_6.6.0 May 24 06:19:49 localhost kernel: VFS: Dquot-cache hash table entries: 512 (order 0, 4096 bytes) May 24 06:19:49 localhost kernel: pnp: PnP ACPI init May 24 06:19:49 localhost kernel: system 00:00: [mem 0x00000000-0x0009ffff] could not be reserved May 24 06:19:49 localhost kernel: system 00:01: [io 0x08a0-0x08a3] has been reserved May 24 06:19:49 localhost kernel: system 00:01: [io 0x0cc0-0x0ccf] has been reserved May 24 06:19:49 localhost kernel: system 00:01: [io 0x04d0-0x04d1] has been reserved May 24 06:19:49 localhost kernel: xen: --> pirq=17 -> irq=8 (gsi=8) May 24 06:19:49 localhost kernel: xen: --> pirq=18 -> irq=12 (gsi=12) May 24 06:19:49 localhost kernel: xen: --> pirq=19 -> irq=1 (gsi=1) May 24 06:19:49 localhost kernel: xen: --> pirq=20 -> irq=6 (gsi=6) May 24 06:19:49 localhost kernel: pnp 00:05: [dma 2] May 24 06:19:49 localhost kernel: xen: --> pirq=21 -> irq=4 (gsi=4) May 24 06:19:49 localhost kernel: system 00:07: [io 0x10c0-0x1141] has been reserved May 24 06:19:49 localhost kernel: system 00:07: [io 0xb044-0xb047] has been reserved May 24 06:19:49 localhost kernel: pnp: PnP ACPI: found 8 devices May 24 06:19:49 localhost kernel: clocksource: acpi_pm: mask: 0xffffff max_cycles: 0xffffff, max_idle_ns: 2085701024 ns May 24 06:19:49 localhost kernel: NET: Registered PF_INET protocol family May 24 06:19:49 localhost kernel: IP idents hash table entries: 65536 (order: 7, 524288 bytes, linear) May 24 06:19:49 localhost kernel: tcp_listen_portaddr_hash hash table entries: 2048 (order: 3, 32768 bytes, linear) May 24 06:19:49 localhost kernel: Table-perturb hash table entries: 65536 (order: 6, 262144 bytes, linear) May 24 06:19:49 localhost kernel: TCP established hash table entries: 32768 (order: 6, 262144 bytes, linear) May 24 06:19:49 localhost kernel: TCP bind hash table entries: 32768 (order: 7, 524288 bytes, linear) May 24 06:19:49 localhost kernel: TCP: Hash tables configured (established 32768 bind 32768) May 24 06:19:49 localhost kernel: MPTCP token hash table entries: 4096 (order: 4, 98304 bytes, linear) May 24 06:19:49 localhost kernel: UDP hash table entries: 2048 (order: 4, 65536 bytes, linear) May 24 06:19:49 
localhost kernel: UDP-Lite hash table entries: 2048 (order: 4, 65536 bytes, linear) May 24 06:19:49 localhost kernel: NET: Registered PF_UNIX/PF_LOCAL protocol family May 24 06:19:49 localhost kernel: NET: Registered PF_XDP protocol family May 24 06:19:49 localhost kernel: pci_bus 0000:00: resource 4 [io 0x0000-0x0cf7 window] May 24 06:19:49 localhost kernel: pci_bus 0000:00: resource 5 [io 0x0d00-0xffff window] May 24 06:19:49 localhost kernel: pci_bus 0000:00: resource 6 [mem 0x000a0000-0x000bffff window] May 24 06:19:49 localhost kernel: pci_bus 0000:00: resource 7 [mem 0xf0000000-0xffffffff window] May 24 06:19:49 localhost kernel: pci 0000:00:01.0: PIIX3: Enabling Passive Release May 24 06:19:49 localhost kernel: pci 0000:00:00.0: Limiting direct PCI/PCI transfers May 24 06:19:49 localhost kernel: PCI: CLS 0 bytes, default 64 May 24 06:19:49 localhost kernel: ACPI: bus type thunderbolt registered May 24 06:19:49 localhost kernel: clocksource: tsc: mask: 0xffffffffffffffff max_cycles: 0x29cd4133323, max_idle_ns: 440795296220 ns May 24 06:19:49 localhost kernel: Trying to unpack rootfs image as initramfs... May 24 06:19:49 localhost kernel: Initialise system trusted keyrings May 24 06:19:49 localhost kernel: Key type blacklist registered May 24 06:19:49 localhost kernel: workingset: timestamp_bits=36 max_order=20 bucket_order=0 May 24 06:19:49 localhost kernel: zbud: loaded May 24 06:19:49 localhost kernel: integrity: Platform Keyring initialized May 24 06:19:49 localhost kernel: integrity: Machine keyring initialized May 24 06:19:49 localhost kernel: NET: Registered PF_ALG protocol family May 24 06:19:49 localhost kernel: xor: automatically using best checksumming function avx May 24 06:19:49 localhost kernel: Key type asymmetric registered May 24 06:19:49 localhost kernel: Asymmetric key parser 'x509' registered May 24 06:19:49 localhost kernel: Block layer SCSI generic (bsg) driver version 0.4 loaded (major 246) May 24 06:19:49 localhost kernel: io scheduler mq-deadline registered May 24 06:19:49 localhost kernel: io scheduler kyber registered May 24 06:19:49 localhost kernel: io scheduler bfq registered May 24 06:19:49 localhost kernel: atomic64_test: passed for x86-64 platform with CX8 and with SSE May 24 06:19:49 localhost kernel: shpchp: Standard Hot Plug PCI Controller Driver version: 0.4 May 24 06:19:49 localhost kernel: input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0 May 24 06:19:49 localhost kernel: ACPI: button: Power Button [PWRF] May 24 06:19:49 localhost kernel: input: Sleep Button as /devices/LNXSYSTM:00/LNXSLPBN:00/input/input1 May 24 06:19:49 localhost kernel: ACPI: button: Sleep Button [SLPF] May 24 06:19:49 localhost kernel: xen: --> pirq=22 -> irq=47 (gsi=47) May 24 06:19:49 localhost kernel: xen:grant_table: Grant tables using version 1 layout May 24 06:19:49 localhost kernel: Grant table initialized May 24 06:19:49 localhost kernel: Cannot get hvm parameter CONSOLE_EVTCHN (18): -22! 
May 24 06:19:49 localhost kernel: Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled May 24 06:19:49 localhost kernel: 00:06: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A May 24 06:19:49 localhost kernel: Non-volatile memory driver v1.3 May 24 06:19:49 localhost kernel: rdac: device handler registered May 24 06:19:49 localhost kernel: hp_sw: device handler registered May 24 06:19:49 localhost kernel: emc: device handler registered May 24 06:19:49 localhost kernel: alua: device handler registered May 24 06:19:49 localhost kernel: usbcore: registered new interface driver usbserial_generic May 24 06:19:49 localhost kernel: usbserial: USB Serial support registered for generic May 24 06:19:49 localhost kernel: i8042: PNP: PS/2 Controller [PNP0303:PS2K,PNP0f13:PS2M] at 0x60,0x64 irq 1,12 May 24 06:19:49 localhost kernel: i8042: Warning: Keylock active May 24 06:19:49 localhost kernel: serio: i8042 KBD port at 0x60,0x64 irq 1 May 24 06:19:49 localhost kernel: serio: i8042 AUX port at 0x60,0x64 irq 12 May 24 06:19:49 localhost kernel: mousedev: PS/2 mouse device common for all mice May 24 06:19:49 localhost kernel: rtc_cmos 00:02: registered as rtc0 May 24 06:19:49 localhost kernel: rtc_cmos 00:02: setting system clock to 2025-05-24T10:19:48 UTC (1748081988) May 24 06:19:49 localhost kernel: rtc_cmos 00:02: alarms up to one day, 114 bytes nvram, hpet irqs May 24 06:19:49 localhost kernel: intel_pstate: CPU model not supported May 24 06:19:49 localhost kernel: hid: raw HID events driver (C) Jiri Kosina May 24 06:19:49 localhost kernel: usbcore: registered new interface driver usbhid May 24 06:19:49 localhost kernel: usbhid: USB HID core driver May 24 06:19:49 localhost kernel: drop_monitor: Initializing network drop monitor service May 24 06:19:49 localhost kernel: Initializing XFRM netlink socket May 24 06:19:49 localhost kernel: NET: Registered PF_INET6 protocol family May 24 06:19:49 localhost kernel: Freeing initrd memory: 37068K May 24 06:19:49 localhost kernel: Segment Routing with IPv6 May 24 06:19:49 localhost kernel: NET: Registered PF_PACKET protocol family May 24 06:19:49 localhost kernel: mpls_gso: MPLS GSO support May 24 06:19:49 localhost kernel: IPI shorthand broadcast: enabled May 24 06:19:49 localhost kernel: AVX2 version of gcm_enc/dec engaged. 
May 24 06:19:49 localhost kernel: AES CTR mode by8 optimization enabled May 24 06:19:49 localhost kernel: sched_clock: Marking stable (849284097, 251221008)->(1199194994, -98689889) May 24 06:19:49 localhost kernel: registered taskstats version 1 May 24 06:19:49 localhost kernel: Loading compiled-in X.509 certificates May 24 06:19:49 localhost kernel: Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: f8c7f7d0e4c329fac8112926b4bb7228168649ea' May 24 06:19:49 localhost kernel: Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80' May 24 06:19:49 localhost kernel: Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8' May 24 06:19:49 localhost kernel: Loaded X.509 cert 'RH-IMA-CA: Red Hat IMA CA: fb31825dd0e073685b264e3038963673f753959a' May 24 06:19:49 localhost kernel: Loaded X.509 cert 'Nvidia GPU OOT signing 001: 55e1cef88193e60419f0b0ec379c49f77545acf0' May 24 06:19:49 localhost kernel: Demotion targets for Node 0: null May 24 06:19:49 localhost kernel: page_owner is disabled May 24 06:19:49 localhost kernel: Key type .fscrypt registered May 24 06:19:49 localhost kernel: Key type fscrypt-provisioning registered May 24 06:19:49 localhost kernel: Key type big_key registered May 24 06:19:49 localhost kernel: Key type encrypted registered May 24 06:19:49 localhost kernel: ima: No TPM chip found, activating TPM-bypass! May 24 06:19:49 localhost kernel: Loading compiled-in module X.509 certificates May 24 06:19:49 localhost kernel: Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: f8c7f7d0e4c329fac8112926b4bb7228168649ea' May 24 06:19:49 localhost kernel: ima: Allocated hash algorithm: sha256 May 24 06:19:49 localhost kernel: ima: No architecture policies found May 24 06:19:49 localhost kernel: evm: Initialising EVM extended attributes: May 24 06:19:49 localhost kernel: evm: security.selinux May 24 06:19:49 localhost kernel: evm: security.SMACK64 (disabled) May 24 06:19:49 localhost kernel: evm: security.SMACK64EXEC (disabled) May 24 06:19:49 localhost kernel: evm: security.SMACK64TRANSMUTE (disabled) May 24 06:19:49 localhost kernel: evm: security.SMACK64MMAP (disabled) May 24 06:19:49 localhost kernel: evm: security.apparmor (disabled) May 24 06:19:49 localhost kernel: evm: security.ima May 24 06:19:49 localhost kernel: evm: security.capability May 24 06:19:49 localhost kernel: evm: HMAC attrs: 0x1 May 24 06:19:49 localhost kernel: input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input2 May 24 06:19:49 localhost kernel: Running certificate verification RSA selftest May 24 06:19:49 localhost kernel: Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db' May 24 06:19:49 localhost kernel: Running certificate verification ECDSA selftest May 24 06:19:49 localhost kernel: Loaded X.509 cert 'Certificate verification ECDSA self-testing key: 2900bcea1deb7bc8479a84a23d758efdfdd2b2d3' May 24 06:19:49 localhost kernel: xenbus_probe_frontend: Device with no driver: device/vbd/768 May 24 06:19:49 localhost kernel: xenbus_probe_frontend: Device with no driver: device/vif/0 May 24 06:19:49 localhost kernel: clk: Disabling unused clocks May 24 06:19:49 localhost kernel: Freeing unused decrypted memory: 2028K May 24 06:19:49 localhost kernel: Freeing unused kernel image (initmem) memory: 4056K May 24 06:19:49 localhost kernel: Write protecting the kernel read-only data: 30720k 
May 24 06:19:49 localhost kernel: Freeing unused kernel image (rodata/data gap) memory: 684K May 24 06:19:49 localhost kernel: x86/mm: Checked W+X mappings: passed, no W+X pages found. May 24 06:19:49 localhost kernel: x86/mm: Checking user space page tables May 24 06:19:49 localhost kernel: x86/mm: Checked W+X mappings: passed, no W+X pages found. May 24 06:19:49 localhost kernel: Run /init as init process May 24 06:19:49 localhost kernel: with arguments: May 24 06:19:49 localhost kernel: /init May 24 06:19:49 localhost kernel: rhgb May 24 06:19:49 localhost kernel: with environment: May 24 06:19:49 localhost kernel: HOME=/ May 24 06:19:49 localhost kernel: TERM=linux May 24 06:19:49 localhost kernel: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-583.el9.x86_64 May 24 06:19:49 localhost systemd[1]: systemd 252-53.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) May 24 06:19:49 localhost systemd[1]: Detected virtualization xen. May 24 06:19:49 localhost systemd[1]: Detected architecture x86-64. May 24 06:19:49 localhost systemd[1]: Running in initrd. May 24 06:19:49 localhost systemd[1]: No hostname configured, using default hostname. May 24 06:19:49 localhost systemd[1]: Hostname set to . May 24 06:19:49 localhost systemd[1]: Queued start job for default target Initrd Default Target. May 24 06:19:49 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch. May 24 06:19:49 localhost systemd[1]: Reached target Initrd /usr File System. May 24 06:19:49 localhost systemd[1]: Reached target Local File Systems. May 24 06:19:49 localhost systemd[1]: Reached target Path Units. May 24 06:19:49 localhost systemd[1]: Reached target Slice Units. May 24 06:19:49 localhost systemd[1]: Reached target Swaps. May 24 06:19:49 localhost systemd[1]: Reached target Timer Units. May 24 06:19:49 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. May 24 06:19:49 localhost systemd[1]: Listening on Journal Socket (/dev/log). May 24 06:19:49 localhost systemd[1]: Listening on Journal Socket. May 24 06:19:49 localhost systemd[1]: Listening on udev Control Socket. May 24 06:19:49 localhost systemd[1]: Listening on udev Kernel Socket. May 24 06:19:49 localhost systemd[1]: Reached target Socket Units. May 24 06:19:49 localhost systemd[1]: Starting Create List of Static Device Nodes... May 24 06:19:49 localhost systemd[1]: Starting Journal Service... May 24 06:19:49 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. May 24 06:19:49 localhost systemd[1]: Starting Apply Kernel Variables... May 24 06:19:49 localhost systemd[1]: Starting Create System Users... May 24 06:19:49 localhost systemd-journald[231]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. May 24 06:19:49 localhost systemd-journald[231]: Runtime Journal (/run/log/journal/72dbb896bb5848cfa189670654d7ed83) is 8.0M, max 69.3M, 61.3M free. 
░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/72dbb896bb5848cfa189670654d7ed83) is currently using 8.0M. ░░ Maximum allowed usage is set to 69.3M. ░░ Leaving at least 34.6M free (of currently available 685.0M of disk space). ░░ Enforced usage limit is thus 69.3M, of which 61.3M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. May 24 06:19:49 localhost systemd-sysusers[233]: Creating group 'nobody' with GID 65534. May 24 06:19:49 localhost systemd[1]: Starting Setup Virtual Console... May 24 06:19:49 localhost systemd[1]: Started Journal Service. May 24 06:19:49 localhost systemd[1]: Finished Create List of Static Device Nodes. ░░ Subject: A start job for unit kmod-static-nodes.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kmod-static-nodes.service has finished successfully. ░░ ░░ The job identifier is 26. May 24 06:19:49 localhost systemd[1]: Finished Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 9. May 24 06:19:49 localhost systemd-sysusers[233]: Creating group 'users' with GID 100. May 24 06:19:49 localhost systemd-sysusers[233]: Creating group 'dbus' with GID 81. May 24 06:19:49 localhost systemd-sysusers[233]: Creating user 'dbus' (System Message Bus) with UID 81 and GID 81. May 24 06:19:49 localhost systemd[1]: Finished Create System Users. ░░ Subject: A start job for unit systemd-sysusers.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysusers.service has finished successfully. ░░ ░░ The job identifier is 20. May 24 06:19:49 localhost systemd[1]: Starting Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 21. May 24 06:19:49 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 17. May 24 06:19:49 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 21. May 24 06:19:49 localhost systemd[1]: Finished Create Volatile Files and Directories. 
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 17. May 24 06:19:49 localhost systemd[1]: Finished Setup Virtual Console. ░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has finished successfully. ░░ ░░ The job identifier is 48. May 24 06:19:49 localhost systemd[1]: dracut ask for additional cmdline parameters was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit dracut-cmdline-ask.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-cmdline-ask.service has finished successfully. ░░ ░░ The job identifier is 47. May 24 06:19:49 localhost systemd[1]: Starting dracut cmdline hook... ░░ Subject: A start job for unit dracut-cmdline.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-cmdline.service has begun execution. ░░ ░░ The job identifier is 43. May 24 06:19:49 localhost dracut-cmdline[247]: dracut-9 dracut-057-87.git20250311.el9 May 24 06:19:49 localhost dracut-cmdline[247]: Using kernel command line parameters: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-583.el9.x86_64 root=UUID=5de771f1-9acb-4b75-bf56-ae0a7779ba73 ro rhgb crashkernel=2G-64G:256M,64G-:512M net.ifnames=0 console=tty0 console=ttyS0,115200n8 May 24 06:19:49 localhost systemd[1]: Finished dracut cmdline hook. ░░ Subject: A start job for unit dracut-cmdline.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-cmdline.service has finished successfully. ░░ ░░ The job identifier is 43. May 24 06:19:49 localhost systemd[1]: Starting dracut pre-udev hook... ░░ Subject: A start job for unit dracut-pre-udev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-udev.service has begun execution. ░░ ░░ The job identifier is 44. May 24 06:19:49 localhost systemd[1]: Finished dracut pre-udev hook. ░░ Subject: A start job for unit dracut-pre-udev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-udev.service has finished successfully. ░░ ░░ The job identifier is 44. May 24 06:19:49 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 12. May 24 06:19:49 localhost systemd-udevd[329]: Using default interface naming scheme 'rhel-9.0'. May 24 06:19:49 localhost kernel: input: ImPS/2 Generic Wheel Mouse as /devices/platform/i8042/serio1/input/input4 May 24 06:19:49 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. 
░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 12. May 24 06:19:49 localhost systemd[1]: dracut pre-trigger hook was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit dracut-pre-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-trigger.service has finished successfully. ░░ ░░ The job identifier is 53. May 24 06:19:49 localhost systemd[1]: Starting Coldplug All udev Devices... ░░ Subject: A start job for unit systemd-udev-trigger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has begun execution. ░░ ░░ The job identifier is 11. May 24 06:19:49 localhost systemd[1]: Finished Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 11. May 24 06:19:49 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 3. May 24 06:19:49 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 2. May 24 06:19:49 localhost systemd[1]: nm-initrd.service was skipped because of an unmet condition check (ConditionPathExists=/run/NetworkManager/initrd/neednet). ░░ Subject: A start job for unit nm-initrd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nm-initrd.service has finished successfully. ░░ ░░ The job identifier is 40. May 24 06:19:49 localhost systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 42. May 24 06:19:49 localhost systemd[1]: nm-wait-online-initrd.service was skipped because of an unmet condition check (ConditionPathExists=/run/NetworkManager/initrd/neednet). ░░ Subject: A start job for unit nm-wait-online-initrd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nm-wait-online-initrd.service has finished successfully. ░░ ░░ The job identifier is 39. May 24 06:19:49 localhost systemd[1]: Starting dracut initqueue hook... ░░ Subject: A start job for unit dracut-initqueue.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-initqueue.service has begun execution. ░░ ░░ The job identifier is 45. May 24 06:19:49 localhost kernel: Invalid max_queues (4), will use default max: 2. 
May 24 06:19:49 localhost kernel: blkfront: xvda: barrier or flush: disabled; persistent grants: disabled; indirect descriptors: enabled; May 24 06:19:49 localhost kernel: xvda: xvda1 May 24 06:19:49 localhost kernel: xen_netfront: Initialising Xen virtual ethernet driver May 24 06:19:49 localhost kernel: libata version 3.00 loaded. May 24 06:19:49 localhost kernel: ata_piix 0000:00:01.1: version 2.13 May 24 06:19:49 localhost kernel: scsi host0: ata_piix May 24 06:19:49 localhost kernel: scsi host1: ata_piix May 24 06:19:49 localhost kernel: ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc000 irq 14 lpm-pol 0 May 24 06:19:49 localhost kernel: ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc008 irq 15 lpm-pol 0 May 24 06:19:50 localhost systemd-udevd[334]: Network interface NamePolicy= disabled on kernel command line. May 24 06:19:50 localhost systemd[1]: Found device /dev/disk/by-uuid/5de771f1-9acb-4b75-bf56-ae0a7779ba73. ░░ Subject: A start job for unit dev-disk-by\x2duuid-5de771f1\x2d9acb\x2d4b75\x2dbf56\x2dae0a7779ba73.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-disk-by\x2duuid-5de771f1\x2d9acb\x2d4b75\x2dbf56\x2dae0a7779ba73.device has finished successfully. ░░ ░░ The job identifier is 33. May 24 06:19:50 localhost systemd[1]: Reached target Initrd Root Device. ░░ Subject: A start job for unit initrd-root-device.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-root-device.target has finished successfully. ░░ ░░ The job identifier is 36. May 24 06:19:50 localhost systemd[1]: Finished dracut initqueue hook. ░░ Subject: A start job for unit dracut-initqueue.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-initqueue.service has finished successfully. ░░ ░░ The job identifier is 45. May 24 06:19:50 localhost systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 46. May 24 06:19:50 localhost systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 52. May 24 06:19:50 localhost systemd[1]: Starting dracut pre-mount hook... ░░ Subject: A start job for unit dracut-pre-mount.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-mount.service has begun execution. ░░ ░░ The job identifier is 49. May 24 06:19:50 localhost systemd[1]: Finished dracut pre-mount hook. ░░ Subject: A start job for unit dracut-pre-mount.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-mount.service has finished successfully. ░░ ░░ The job identifier is 49. May 24 06:19:50 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/5de771f1-9acb-4b75-bf56-ae0a7779ba73... 
░░ Subject: A start job for unit systemd-fsck-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has begun execution. ░░ ░░ The job identifier is 34. May 24 06:19:50 localhost systemd-fsck[393]: /usr/sbin/fsck.xfs: XFS file system. May 24 06:19:50 localhost systemd[1]: Finished File System Check on /dev/disk/by-uuid/5de771f1-9acb-4b75-bf56-ae0a7779ba73. ░░ Subject: A start job for unit systemd-fsck-root.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has finished successfully. ░░ ░░ The job identifier is 34. May 24 06:19:50 localhost systemd[1]: Mounting /sysroot... ░░ Subject: A start job for unit sysroot.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has begun execution. ░░ ░░ The job identifier is 32. May 24 06:19:50 localhost kernel: SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled May 24 06:19:50 localhost kernel: XFS (xvda1): Mounting V5 Filesystem 5de771f1-9acb-4b75-bf56-ae0a7779ba73 May 24 06:19:51 localhost kernel: XFS (xvda1): Ending clean mount May 24 06:19:51 localhost systemd[1]: Mounted /sysroot. ░░ Subject: A start job for unit sysroot.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has finished successfully. ░░ ░░ The job identifier is 32. May 24 06:19:51 localhost systemd[1]: Reached target Initrd Root File System. ░░ Subject: A start job for unit initrd-root-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-root-fs.target has finished successfully. ░░ ░░ The job identifier is 38. May 24 06:19:51 localhost systemd[1]: Starting Mountpoints Configured in the Real Root... ░░ Subject: A start job for unit initrd-parse-etc.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has begun execution. ░░ ░░ The job identifier is 37. May 24 06:19:51 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-parse-etc.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Finished Mountpoints Configured in the Real Root. ░░ Subject: A start job for unit initrd-parse-etc.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has finished successfully. ░░ ░░ The job identifier is 37. May 24 06:19:51 localhost systemd[1]: Reached target Initrd File Systems. ░░ Subject: A start job for unit initrd-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-fs.target has finished successfully. ░░ ░░ The job identifier is 54. May 24 06:19:51 localhost systemd[1]: Reached target Initrd Default Target. ░░ Subject: A start job for unit initrd.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd.target has finished successfully. ░░ ░░ The job identifier is 1. 
May 24 06:19:51 localhost systemd[1]: dracut mount hook was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit dracut-mount.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-mount.service has finished successfully. ░░ ░░ The job identifier is 50. May 24 06:19:51 localhost systemd[1]: Starting dracut pre-pivot and cleanup hook... ░░ Subject: A start job for unit dracut-pre-pivot.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has begun execution. ░░ ░░ The job identifier is 51. May 24 06:19:51 localhost systemd[1]: Finished dracut pre-pivot and cleanup hook. ░░ Subject: A start job for unit dracut-pre-pivot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has finished successfully. ░░ ░░ The job identifier is 51. May 24 06:19:51 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons... ░░ Subject: A start job for unit initrd-cleanup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has begun execution. ░░ ░░ The job identifier is 57. May 24 06:19:51 localhost systemd[1]: Stopped target Network. ░░ Subject: A stop job for unit network.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit network.target has finished. ░░ ░░ The job identifier is 113 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Timer Units. ░░ Subject: A stop job for unit timers.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit timers.target has finished. ░░ ░░ The job identifier is 105 and the job result is done. May 24 06:19:51 localhost systemd[1]: dbus.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dbus.socket has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Closed D-Bus System Message Bus Socket. ░░ Subject: A stop job for unit dbus.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dbus.socket has finished. ░░ ░░ The job identifier is 88 and the job result is done. May 24 06:19:51 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-pivot.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped dracut pre-pivot and cleanup hook. ░░ Subject: A stop job for unit dracut-pre-pivot.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-pivot.service has finished. ░░ ░░ The job identifier is 112 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Initrd Default Target. ░░ Subject: A stop job for unit initrd.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd.target has finished. ░░ ░░ The job identifier is 96 and the job result is done. 
May 24 06:19:51 localhost systemd[1]: Stopped target Basic System. ░░ Subject: A stop job for unit basic.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit basic.target has finished. ░░ ░░ The job identifier is 95 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Initrd Root Device. ░░ Subject: A stop job for unit initrd-root-device.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-root-device.target has finished. ░░ ░░ The job identifier is 92 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Initrd /usr File System. ░░ Subject: A stop job for unit initrd-usr-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-usr-fs.target has finished. ░░ ░░ The job identifier is 103 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Path Units. ░░ Subject: A stop job for unit paths.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit paths.target has finished. ░░ ░░ The job identifier is 98 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-ask-password-console.path: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-ask-password-console.path has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Dispatch Password Requests to Console Directory Watch. ░░ Subject: A stop job for unit systemd-ask-password-console.path has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-ask-password-console.path has finished. ░░ ░░ The job identifier is 114 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Remote File Systems. ░░ Subject: A stop job for unit remote-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs.target has finished. ░░ ░░ The job identifier is 109 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Preparation for Remote File Systems. ░░ Subject: A stop job for unit remote-fs-pre.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs-pre.target has finished. ░░ ░░ The job identifier is 107 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Slice Units. ░░ Subject: A stop job for unit slices.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit slices.target has finished. ░░ ░░ The job identifier is 106 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Socket Units. ░░ Subject: A stop job for unit sockets.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sockets.target has finished. ░░ ░░ The job identifier is 115 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target System Initialization. ░░ Subject: A stop job for unit sysinit.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sysinit.target has finished. 
░░ ░░ The job identifier is 94 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Local File Systems. ░░ Subject: A stop job for unit local-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit local-fs.target has finished. ░░ ░░ The job identifier is 104 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopped target Swaps. ░░ Subject: A stop job for unit swap.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit swap.target has finished. ░░ ░░ The job identifier is 99 and the job result is done. May 24 06:19:51 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-mount.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped dracut pre-mount hook. ░░ Subject: A stop job for unit dracut-pre-mount.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-mount.service has finished. ░░ ░░ The job identifier is 93 and the job result is done. May 24 06:19:51 localhost systemd[1]: dracut-initqueue.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-initqueue.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped dracut initqueue hook. ░░ Subject: A stop job for unit dracut-initqueue.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-initqueue.service has finished. ░░ ░░ The job identifier is 110 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-sysctl.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysctl.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Apply Kernel Variables. ░░ Subject: A stop job for unit systemd-sysctl.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysctl.service has finished. ░░ ░░ The job identifier is 111 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-tmpfiles-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Create Volatile Files and Directories. ░░ Subject: A stop job for unit systemd-tmpfiles-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup.service has finished. ░░ ░░ The job identifier is 87 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-udev-trigger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udev-trigger.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Coldplug All udev Devices. 
░░ Subject: A stop job for unit systemd-udev-trigger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udev-trigger.service has finished. ░░ ░░ The job identifier is 83 and the job result is done. May 24 06:19:51 localhost systemd[1]: Stopping Rule-based Manager for Device Events and Files... ░░ Subject: A stop job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 80. May 24 06:19:51 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Setup Virtual Console. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 108 and the job result is done. May 24 06:19:51 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysctl.service.mount has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-cleanup.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons. ░░ Subject: A start job for unit initrd-cleanup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has finished successfully. ░░ ░░ The job identifier is 57. May 24 06:19:51 localhost systemd[1]: systemd-udevd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Rule-based Manager for Device Events and Files. ░░ Subject: A stop job for unit systemd-udevd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has finished. ░░ ░░ The job identifier is 80 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-udevd-control.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-control.socket has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Closed udev Control Socket. 
░░ Subject: A stop job for unit systemd-udevd-control.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-control.socket has finished. ░░ ░░ The job identifier is 79 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-udevd-kernel.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-kernel.socket has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Closed udev Kernel Socket. ░░ Subject: A stop job for unit systemd-udevd-kernel.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-kernel.socket has finished. ░░ ░░ The job identifier is 82 and the job result is done. May 24 06:19:51 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-udev.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped dracut pre-udev hook. ░░ Subject: A stop job for unit dracut-pre-udev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-udev.service has finished. ░░ ░░ The job identifier is 101 and the job result is done. May 24 06:19:51 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-cmdline.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped dracut cmdline hook. ░░ Subject: A stop job for unit dracut-cmdline.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-cmdline.service has finished. ░░ ░░ The job identifier is 102 and the job result is done. May 24 06:19:51 localhost systemd[1]: Starting Cleanup udev Database... ░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has begun execution. ░░ ░░ The job identifier is 78. May 24 06:19:51 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Create Static Device Nodes in /dev. ░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup-dev.service has finished. ░░ ░░ The job identifier is 86 and the job result is done. May 24 06:19:51 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit kmod-static-nodes.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Create List of Static Device Nodes. 
░░ Subject: A stop job for unit kmod-static-nodes.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit kmod-static-nodes.service has finished. ░░ ░░ The job identifier is 100 and the job result is done. May 24 06:19:51 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysusers.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Stopped Create System Users. ░░ Subject: A stop job for unit systemd-sysusers.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysusers.service has finished. ░░ ░░ The job identifier is 85 and the job result is done. May 24 06:19:51 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: run-credentials-systemd\x2dsysusers.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysusers.service.mount has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state. May 24 06:19:51 localhost systemd[1]: Finished Cleanup udev Database. ░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully. ░░ ░░ The job identifier is 78. May 24 06:19:51 localhost systemd[1]: Reached target Switch Root. ░░ Subject: A start job for unit initrd-switch-root.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.target has finished successfully. ░░ ░░ The job identifier is 60. May 24 06:19:51 localhost systemd[1]: Starting Switch Root... ░░ Subject: A start job for unit initrd-switch-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.service has begun execution. ░░ ░░ The job identifier is 77. May 24 06:19:51 localhost systemd[1]: Switching root. May 24 06:19:51 localhost systemd-journald[231]: Journal stopped ░░ Subject: The journal has been stopped ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has shut down and closed all currently ░░ active journal files. May 24 06:20:01 localhost systemd-journald[231]: Received SIGTERM from PID 1 (systemd). 
May 24 06:20:01 localhost kernel: audit: type=1404 audit(1748081994.619:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 May 24 06:20:01 localhost kernel: SELinux: policy capability network_peer_controls=1 May 24 06:20:01 localhost kernel: SELinux: policy capability open_perms=1 May 24 06:20:01 localhost kernel: SELinux: policy capability extended_socket_class=1 May 24 06:20:01 localhost kernel: SELinux: policy capability always_check_network=0 May 24 06:20:01 localhost kernel: SELinux: policy capability cgroup_seclabel=1 May 24 06:20:01 localhost kernel: SELinux: policy capability nnp_nosuid_transition=1 May 24 06:20:01 localhost kernel: SELinux: policy capability genfs_seclabel_symlinks=1 May 24 06:20:01 localhost kernel: audit: type=1403 audit(1748081995.468:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 May 24 06:20:01 localhost systemd[1]: Successfully loaded SELinux policy in 853.656ms. May 24 06:20:01 localhost systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 19.170ms. May 24 06:20:01 localhost systemd[1]: systemd 252-53.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) May 24 06:20:01 localhost systemd[1]: Detected virtualization xen. May 24 06:20:01 localhost systemd[1]: Detected architecture x86-64. May 24 06:20:01 localhost systemd[1]: Initializing machine ID from random generator. May 24 06:20:01 localhost systemd[1]: Installed transient /etc/machine-id file. May 24 06:20:01 localhost systemd-rc-local-generator[459]: /etc/rc.d/rc.local is not marked executable, skipping. May 24 06:20:01 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully. May 24 06:20:01 localhost systemd[1]: Stopped Switch Root. May 24 06:20:01 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. May 24 06:20:01 localhost systemd[1]: Created slice Slice /system/getty. May 24 06:20:01 localhost systemd[1]: Created slice Slice /system/modprobe. May 24 06:20:01 localhost systemd[1]: Created slice Slice /system/serial-getty. May 24 06:20:01 localhost systemd[1]: Created slice Slice /system/sshd-keygen. May 24 06:20:01 localhost systemd[1]: Created slice User and Session Slice. May 24 06:20:01 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch. May 24 06:20:01 localhost systemd[1]: Started Forward Password Requests to Wall Directory Watch. May 24 06:20:01 localhost systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. May 24 06:20:01 localhost systemd[1]: Reached target Local Encrypted Volumes. May 24 06:20:01 localhost systemd[1]: Stopped target Switch Root. May 24 06:20:01 localhost systemd[1]: Stopped target Initrd File Systems. May 24 06:20:01 localhost systemd[1]: Stopped target Initrd Root File System. May 24 06:20:01 localhost systemd[1]: Reached target Local Integrity Protected Volumes. May 24 06:20:01 localhost systemd[1]: Reached target Path Units. May 24 06:20:01 localhost systemd[1]: Reached target Slice Units. May 24 06:20:01 localhost systemd[1]: Reached target Swaps. May 24 06:20:01 localhost systemd[1]: Reached target Local Verity Protected Volumes. 
May 24 06:20:01 localhost systemd[1]: Listening on RPCbind Server Activation Socket. May 24 06:20:01 localhost systemd[1]: Reached target RPC Port Mapper. May 24 06:20:01 localhost systemd[1]: Listening on Process Core Dump Socket. May 24 06:20:01 localhost systemd[1]: Listening on initctl Compatibility Named Pipe. May 24 06:20:01 localhost systemd[1]: Listening on udev Control Socket. May 24 06:20:01 localhost systemd[1]: Listening on udev Kernel Socket. May 24 06:20:01 localhost systemd[1]: Mounting Huge Pages File System... May 24 06:20:01 localhost systemd[1]: Mounting POSIX Message Queue File System... May 24 06:20:01 localhost systemd[1]: Mounting Kernel Debug File System... May 24 06:20:01 localhost systemd[1]: Mounting Kernel Trace File System... May 24 06:20:01 localhost systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). May 24 06:20:01 localhost systemd[1]: Starting Create List of Static Device Nodes... May 24 06:20:01 localhost systemd[1]: Starting Load Kernel Module configfs... May 24 06:20:01 localhost systemd[1]: Starting Load Kernel Module drm... May 24 06:20:01 localhost systemd[1]: Starting Load Kernel Module efi_pstore... May 24 06:20:01 localhost systemd[1]: Starting Load Kernel Module fuse... May 24 06:20:01 localhost systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... May 24 06:20:01 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully. May 24 06:20:01 localhost systemd[1]: Stopped File System Check on Root Device. May 24 06:20:01 localhost systemd[1]: Stopped Journal Service. May 24 06:20:01 localhost systemd[1]: Starting Journal Service... May 24 06:20:01 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. May 24 06:20:01 localhost systemd[1]: Starting Generate network units from Kernel command line... May 24 06:20:01 localhost systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). May 24 06:20:01 localhost systemd[1]: Starting Remount Root and Kernel File Systems... May 24 06:20:01 localhost systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. May 24 06:20:01 localhost systemd[1]: Starting Apply Kernel Variables... May 24 06:20:01 localhost systemd[1]: Starting Coldplug All udev Devices... May 24 06:20:01 localhost systemd[1]: Mounted Huge Pages File System. May 24 06:20:01 localhost systemd[1]: Mounted POSIX Message Queue File System. May 24 06:20:01 localhost systemd[1]: Mounted Kernel Debug File System. May 24 06:20:01 localhost systemd[1]: Mounted Kernel Trace File System. May 24 06:20:01 localhost systemd[1]: Finished Generate network units from Kernel command line. May 24 06:20:01 localhost systemd[1]: Finished Remount Root and Kernel File Systems. May 24 06:20:01 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). May 24 06:20:01 localhost systemd[1]: Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). May 24 06:20:01 localhost systemd[1]: Starting Load/Save OS Random Seed... May 24 06:20:01 localhost systemd[1]: Create System Users was skipped because no trigger condition checks were met. May 24 06:20:01 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network. 
May 24 06:20:01 localhost systemd[1]: Finished Apply Kernel Variables. May 24 06:20:01 localhost systemd[1]: Finished Load/Save OS Random Seed. May 24 06:20:01 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). May 24 06:20:01 localhost systemd[1]: Finished Create List of Static Device Nodes. May 24 06:20:01 localhost systemd[1]: Starting Create Static Device Nodes in /dev... May 24 06:20:01 localhost systemd-journald[498]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. May 24 06:20:01 localhost systemd-journald[498]: Runtime Journal (/run/log/journal/35ab3e8dfcfc4f79b6782a5fff97617a) is 8.0M, max 69.3M, 61.3M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/35ab3e8dfcfc4f79b6782a5fff97617a) is currently using 8.0M. ░░ Maximum allowed usage is set to 69.3M. ░░ Leaving at least 34.6M free (of currently available 676.9M of disk space). ░░ Enforced usage limit is thus 69.3M, of which 61.3M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. May 24 06:20:01 localhost systemd[1]: Queued start job for default target Multi-User System. May 24 06:20:01 localhost systemd[1]: systemd-journald.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. May 24 06:20:01 localhost systemd[1]: Started Journal Service. May 24 06:20:02 localhost systemd[1]: Starting Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 141. May 24 06:20:02 localhost systemd[1]: Finished Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 133. May 24 06:20:02 localhost systemd-journald[498]: Runtime Journal (/run/log/journal/35ab3e8dfcfc4f79b6782a5fff97617a) is 8.0M, max 69.3M, 61.3M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/35ab3e8dfcfc4f79b6782a5fff97617a) is currently using 8.0M. ░░ Maximum allowed usage is set to 69.3M. ░░ Leaving at least 34.6M free (of currently available 676.9M of disk space). ░░ Enforced usage limit is thus 69.3M, of which 61.3M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. 
May 24 06:20:02 localhost systemd-journald[498]: Received client request to flush runtime journal. May 24 06:20:02 localhost systemd[1]: Finished Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 141. May 24 06:20:02 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. May 24 06:20:02 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 158. May 24 06:20:02 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@efi_pstore.service has successfully entered the 'dead' state. May 24 06:20:02 localhost systemd[1]: Finished Load Kernel Module efi_pstore. ░░ Subject: A start job for unit modprobe@efi_pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@efi_pstore.service has finished successfully. ░░ ░░ The job identifier is 173. May 24 06:20:02 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 157. May 24 06:20:02 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore). ░░ Subject: A start job for unit systemd-pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pstore.service has finished successfully. ░░ ░░ The job identifier is 172. May 24 06:20:02 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 157. May 24 06:20:02 localhost kernel: fuse: init (API version 7.37) May 24 06:20:02 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. May 24 06:20:02 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 178. 
May 24 06:20:02 localhost kernel: ACPI: bus type drm_connector registered May 24 06:20:02 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 177. May 24 06:20:02 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. May 24 06:20:02 localhost systemd[1]: Finished Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 236. May 24 06:20:02 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 177. May 24 06:20:03 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 126. May 24 06:20:03 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 145. May 24 06:20:03 localhost systemd[1]: Reached target Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 143. May 24 06:20:03 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 174. May 24 06:20:03 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 176. May 24 06:20:03 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. 
░░ ░░ The job identifier is 138. May 24 06:20:03 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 163. May 24 06:20:03 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 154. May 24 06:20:03 localhost systemd[1]: Starting Commit a transient machine-id on disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 140. May 24 06:20:03 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 146. May 24 06:20:03 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 134. May 24 06:20:03 localhost bootctl[511]: Couldn't find EFI system partition, skipping. May 24 06:20:03 localhost systemd[1]: Finished Automatic Boot Loader Update. ░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 154. May 24 06:20:03 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. May 24 06:20:03 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 140. May 24 06:20:03 localhost systemd-udevd[514]: Using default interface naming scheme 'rhel-9.0'. May 24 06:20:03 localhost systemd[1]: Finished Create Volatile Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 146. May 24 06:20:03 localhost systemd[1]: Mounting RPC Pipe File System... 
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 219. May 24 06:20:03 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 211. May 24 06:20:03 localhost systemd[1]: Starting RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 232. May 24 06:20:03 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 156. May 24 06:20:03 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 128. May 24 06:20:03 localhost kernel: RPC: Registered named UNIX socket transport module. May 24 06:20:03 localhost kernel: RPC: Registered udp transport module. May 24 06:20:03 localhost kernel: RPC: Registered tcp transport module. May 24 06:20:03 localhost kernel: RPC: Registered tcp-with-tls transport module. May 24 06:20:03 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. May 24 06:20:03 localhost systemd[1]: Mounted RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 219. May 24 06:20:03 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 218. May 24 06:20:03 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 134. May 24 06:20:03 localhost systemd[1]: Starting Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 263. May 24 06:20:03 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. May 24 06:20:03 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 263. May 24 06:20:03 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 210. May 24 06:20:03 localhost auditd[552]: No plugins found, not dispatching events May 24 06:20:03 localhost systemd-udevd[531]: Network interface NamePolicy= disabled on kernel command line. May 24 06:20:03 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 May 24 06:20:03 localhost auditd[552]: Init complete, auditd 3.1.5 listening for events (startup state enable) May 24 06:20:04 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer May 24 06:20:04 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 232. May 24 06:20:04 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr May 24 06:20:04 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console May 24 06:20:04 localhost kernel: Console: switching to colour dummy device 80x25 May 24 06:20:04 localhost kernel: [drm] Initialized cirrus 2.0.0 for 0000:00:02.0 on minor 0 May 24 06:20:04 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device May 24 06:20:04 localhost kernel: Console: switching to colour frame buffer device 128x48 May 24 06:20:04 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device May 24 06:20:04 localhost augenrules[556]: /sbin/augenrules: No change May 24 06:20:04 localhost augenrules[581]: No rules May 24 06:20:04 localhost augenrules[581]: enabled 1 May 24 06:20:04 localhost augenrules[581]: failure 1 May 24 06:20:04 localhost augenrules[581]: pid 552 May 24 06:20:04 localhost augenrules[581]: rate_limit 0 May 24 06:20:04 localhost augenrules[581]: backlog_limit 8192 May 24 06:20:04 localhost augenrules[581]: lost 0 May 24 06:20:04 localhost augenrules[581]: backlog 0 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time 60000 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time_actual 0 May 24 06:20:04 localhost augenrules[581]: enabled 1 May 24 06:20:04 localhost augenrules[581]: failure 1 May 24 06:20:04 localhost augenrules[581]: pid 552 May 24 06:20:04 localhost augenrules[581]: rate_limit 0 May 24 06:20:04 localhost augenrules[581]: backlog_limit 8192 May 24 06:20:04 localhost augenrules[581]: lost 0 May 24 06:20:04 localhost augenrules[581]: backlog 4 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time 60000 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time_actual 0 May 24 06:20:04 localhost augenrules[581]: enabled 1 May 24 06:20:04 
localhost augenrules[581]: failure 1 May 24 06:20:04 localhost augenrules[581]: pid 552 May 24 06:20:04 localhost augenrules[581]: rate_limit 0 May 24 06:20:04 localhost augenrules[581]: backlog_limit 8192 May 24 06:20:04 localhost augenrules[581]: lost 0 May 24 06:20:04 localhost augenrules[581]: backlog 7 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time 60000 May 24 06:20:04 localhost augenrules[581]: backlog_wait_time_actual 0 May 24 06:20:04 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 211. May 24 06:20:04 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 230. May 24 06:20:04 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 230. May 24 06:20:04 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 120. May 24 06:20:04 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 193. May 24 06:20:04 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 192. May 24 06:20:04 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 199. May 24 06:20:04 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 191. May 24 06:20:04 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 184. May 24 06:20:04 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. 
░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 186. May 24 06:20:04 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 183. May 24 06:20:04 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 197. May 24 06:20:04 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 155. May 24 06:20:04 localhost systemd[1]: Started D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 197. May 24 06:20:04 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 117. May 24 06:20:04 localhost dbus-broker-lau[596]: Ready May 24 06:20:05 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 223. May 24 06:20:05 localhost systemd[1]: Starting Cloud-init: Local Stage (pre-network)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 245. May 24 06:20:05 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 142. May 24 06:20:05 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 222. May 24 06:20:05 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). 
░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 182. May 24 06:20:05 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 233. May 24 06:20:05 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 241. May 24 06:20:05 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 239. May 24 06:20:05 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 242. May 24 06:20:05 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 238. May 24 06:20:05 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 201. May 24 06:20:05 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 202. May 24 06:20:05 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 234. May 24 06:20:05 localhost systemd[1]: Starting Rotate log files... 
░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 269. May 24 06:20:05 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 142. May 24 06:20:05 localhost systemd-logind[603]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. May 24 06:20:05 localhost systemd-logind[603]: Watching system buttons on /dev/input/event0 (Power Button) May 24 06:20:05 localhost systemd-logind[603]: Watching system buttons on /dev/input/event1 (Sleep Button) May 24 06:20:05 localhost systemd-logind[603]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) May 24 06:20:05 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 234. May 24 06:20:05 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. May 24 06:20:05 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 269. May 24 06:20:05 localhost rngd[602]: Disabling 7: PKCS11 Entropy generator (pkcs11) May 24 06:20:05 localhost rngd[602]: Disabling 5: NIST Network Entropy Beacon (nist) May 24 06:20:05 localhost rngd[602]: Disabling 9: Qrypt quantum entropy beacon (qrypt) May 24 06:20:05 localhost rngd[602]: Disabling 10: Named pipe entropy input (namedpipe) May 24 06:20:05 localhost rngd[602]: Initializing available sources May 24 06:20:05 localhost rngd[602]: [hwrng ]: Initialization Failed May 24 06:20:05 localhost rngd[602]: [rdrand]: Enabling RDRAND rng support May 24 06:20:05 localhost rngd[602]: [rdrand]: Initialized May 24 06:20:05 localhost rngd[602]: [jitter]: JITTER timeout set to 5 sec May 24 06:20:05 localhost rngd[602]: [jitter]: Initializing AES buffer May 24 06:20:05 localhost chronyd[612]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) May 24 06:20:05 localhost chronyd[612]: Loaded 0 symmetric keys May 24 06:20:05 localhost chronyd[612]: Using right/UTC timezone to obtain leap second data May 24 06:20:05 localhost chronyd[612]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift May 24 06:20:05 localhost chronyd[612]: Loaded seccomp filter (level 2) May 24 06:20:05 localhost systemd[1]: Started NTP client/server. 
░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 223. May 24 06:20:09 localhost cloud-init[616]: Cloud-init v. 24.4-5.el9 running 'init-local' at Sat, 24 May 2025 10:20:09 +0000. Up 21.77 seconds. May 24 06:20:10 localhost rngd[602]: [jitter]: Unable to obtain AES key, disabling JITTER source May 24 06:20:10 localhost rngd[602]: [jitter]: Initialization Failed May 24 06:20:10 localhost rngd[602]: Process privileges have been dropped to 2:2 May 24 06:20:10 localhost dhclient[620]: Internet Systems Consortium DHCP Client 4.4.2b1 May 24 06:20:10 localhost dhclient[620]: Copyright 2004-2019 Internet Systems Consortium. May 24 06:20:10 localhost dhclient[620]: All rights reserved. May 24 06:20:10 localhost dhclient[620]: For info, please visit https://www.isc.org/software/dhcp/ May 24 06:20:10 localhost dhclient[620]: May 24 06:20:10 localhost dhclient[620]: Listening on LPF/eth0/0a:ff:c8:32:02:ad May 24 06:20:10 localhost dhclient[620]: Sending on LPF/eth0/0a:ff:c8:32:02:ad May 24 06:20:10 localhost dhclient[620]: Sending on Socket/fallback May 24 06:20:10 localhost dhclient[620]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 8 (xid=0xf54c6457) May 24 06:20:10 localhost dhclient[620]: DHCPOFFER of 10.31.14.202 from 10.31.12.1 May 24 06:20:10 localhost dhclient[620]: DHCPREQUEST for 10.31.14.202 on eth0 to 255.255.255.255 port 67 (xid=0xf54c6457) May 24 06:20:10 localhost dhclient[620]: DHCPACK of 10.31.14.202 from 10.31.12.1 (xid=0xf54c6457) May 24 06:20:10 localhost dhclient[620]: bound to 10.31.14.202 -- renewal in 1382 seconds. May 24 06:20:10 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 334. May 24 06:20:10 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 334. May 24 06:20:10 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-hostnamed[635]: Hostname set to <ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com> (static) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Local Stage (pre-network). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 245. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 165. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager...
░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 196. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.4656] NetworkManager (version 1.53.4-1.el9) is starting... (boot:289635a2-01f3-460a-92ae-3bbbf46c6748) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.4659] Read config: /etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.6197] manager[0x55fd812b9080]: monitoring kernel firmware directory '/lib/firmware'. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.6230] hostname: hostname: using hostnamed May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.6230] hostname: static hostname changed from (none) to "ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com" May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.6235] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.8315] manager[0x55fd812b9080]: rfkill: Wi-Fi hardware radio set enabled May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.8315] manager[0x55fd812b9080]: rfkill: WWAN hardware radio set enabled May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 401. 
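[Editor's note] The NetworkManager startup messages above (config files read, hostname taken from hostnamed, Wi-Fi and WWAN radios enabled via rfkill) can be cross-checked from userspace once the daemon is up. A minimal sketch, assuming nmcli is available on the managed node; the field selection is illustrative and not part of the captured log:

    import subprocess

    def nm_overview():
        """Query NetworkManager's overall state and radio switches via nmcli."""
        # Terse output, e.g. "running:connected"
        state = subprocess.run(
            ["nmcli", "-t", "-f", "RUNNING,STATE", "general"],
            capture_output=True, text=True, check=True,
        ).stdout.strip()
        # Table with WIFI-HW / WIFI / WWAN-HW / WWAN columns
        radios = subprocess.run(
            ["nmcli", "radio", "all"],
            capture_output=True, text=True, check=True,
        ).stdout.strip()
        return state, radios

    if __name__ == "__main__":
        state, radios = nm_overview()
        print("general:", state)
        print(radios)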
May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9449] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.53.4-1.el9/libnm-device-plugin-team.so) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9450] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9450] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9451] manager: Networking is enabled by state file May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9458] settings: Loaded settings plugin: keyfile (internal) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9496] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.53.4-1.el9/libnm-settings-plugin-ifcfg-rh.so") May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9525] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 408. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9544] dhcp: init: Using DHCP client 'internal' May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9548] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9571] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9585] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9591] device (lo): Activation: starting connection 'lo' (d558b210-645d-4dfd-b74c-4b8e7711db54) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9597] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9601] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9615] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. 
░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 196. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9619] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9621] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9623] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9625] device (eth0): carrier: link connected May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9627] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9640] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9646] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9650] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9651] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9653] manager: NetworkManager state is now CONNECTING May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9654] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9660] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9662] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9674] dhcp4 (eth0): state changed new lease, address=10.31.14.202 May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9682] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 198. 
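[Editor's note] The eth0 activation above walks NetworkManager's usual device state machine (disconnected -> prepare -> config -> ip-config -> ip-check -> secondaries -> activated) and ends with a DHCP lease for 10.31.14.202 and a default route. A hypothetical polling helper, not something the test itself runs, that waits for a device to report "connected" in nmcli's terse device listing:

    import subprocess
    import time

    def wait_for_device(device="eth0", timeout=45.0, interval=1.0):
        """Poll `nmcli -t -f DEVICE,STATE device` until the device is 'connected'."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            out = subprocess.run(
                ["nmcli", "-t", "-f", "DEVICE,STATE", "device"],
                capture_output=True, text=True, check=True,
            ).stdout
            # Terse lines look like "eth0:connected"
            states = dict(line.split(":", 1) for line in out.splitlines() if ":" in line)
            if states.get(device) == "connected":
                return True
            time.sleep(interval)
        return False

    if __name__ == "__main__":
        print("eth0 connected:", wait_for_device("eth0"))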
May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082011.9748] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full') May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 195. May 24 06:20:11 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 221. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 408. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1589] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1591] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full') May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1593] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external') May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1598] device (lo): Activation: successful, device activated. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1603] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1606] manager: NetworkManager state is now CONNECTED_SITE May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1609] device (eth0): Activation: successful, device activated. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1614] manager: NetworkManager state is now CONNECTED_GLOBAL May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com NetworkManager[639]: [1748082012.1619] manager: startup complete May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 195. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Network Stage... 
░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 248. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 221. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 217. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 213. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 215. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 212. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 162. May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: Added source 10.11.160.238 May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: Added source 10.18.100.10 May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: Added source 10.2.32.37 May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: Added source 10.2.32.38 May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Cloud-init v. 24.4-5.el9 running 'init' at Sat, 24 May 2025 10:20:12 +0000. Up 24.43 seconds. 
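[Editor's note] chronyd reports four NTP sources being added as soon as the network is up. A small illustrative check (assumes chronyc is installed and allowed to talk to the local chronyd) that lists the configured sources so their reachability can be inspected:

    import subprocess

    def chrony_sources():
        """Return chronyc's source listing (IP addresses, not hostnames) as lines."""
        out = subprocess.run(
            ["chronyc", "-n", "sources"],   # -n: print numeric addresses
            capture_output=True, text=True, check=True,
        ).stdout
        return [line for line in out.splitlines() if line.strip()]

    if __name__ == "__main__":
        for line in chrony_sources():
            print(line)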
May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info++++++++++++++++++++++++++++++++++++++ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | eth0 | True | 10.31.14.202 | 255.255.252.0 | global | 0a:ff:c8:32:02:ad | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | eth0 | True | fe80::8ff:c8ff:fe32:2ad/64 | . | link | 0a:ff:c8:32:02:ad | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | lo | True | ::1/128 | . | host | . | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | 0 | 0.0.0.0 | 10.31.12.1 | 0.0.0.0 | eth0 | UG | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | 1 | 10.31.12.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+---------+-----------+-------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | Route | Destination | Gateway | Interface | Flags | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+---------+-----------+-------+ May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | May 24 06:20:12 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: | 3 | multicast | :: | eth0 | U | May 24 06:20:12 
ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: ci-info: +-------+-------------+---------+-----------+-------+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Generating public/private rsa key pair. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key fingerprint is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: SHA256:2wMKRwzFQs89cnHGUEDOIPFa6kZG9glotCIPVZk1l5M root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key's randomart image is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +---[RSA 3072]----+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | oo*B=.*B+ | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | o o+B.BE+. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |+ + +.O *. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |.= o B + . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | . * + S | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | + o . + | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | o . . o | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | . . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +----[SHA256]-----+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Generating public/private ecdsa key pair. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key fingerprint is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: SHA256:vCyk4OwEOoPBYUTrGPC6TbclSBJ0r4BKav+ppSZHwzE root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key's randomart image is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +---[ECDSA 256]---+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |=+ . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |+oo . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |+*o . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |O=oE. . 
| May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |B+=.= o S | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |+O.* * . . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: |* *.+.. o | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | * o+ .. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | =o.o | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +----[SHA256]-----+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Generating public/private ed25519 key pair. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key fingerprint is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: SHA256:nA53QvH9NzgkUwyzLciPpUJ+f4k5zO6uG9D32cZAuuU root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: The key's randomart image is: May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +--[ED25519 256]--+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | . oo. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | .o..=. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | ..o.*.+ | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | oo..= B.. | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | .+S=.+ =...| May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | ++o= B B..| May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | .. O E + | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | o o . | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: | o=+ | May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[731]: +----[SHA256]-----+ May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Network Stage. ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 248. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 247. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. 
░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 194. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Config Stage... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 246. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 243. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 227. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 214. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 231. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 237. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 250. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 227. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 250. 
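[Editor's note] At this point systemd is fanning out start jobs for rsyslog, sshd, kdump, restraintd, crond and the user-session machinery. A hedged sketch of how the resulting unit states could be spot-checked afterwards with systemctl; the unit list is only an example drawn from the messages above:

    import subprocess

    UNITS = ["rsyslog.service", "sshd.service", "crond.service",
             "restraintd.service", "kdump.service"]

    def unit_states(units):
        """Map each unit to the string printed by `systemctl is-active`."""
        states = {}
        for unit in units:
            # Non-zero exit just means "not active"; do not raise on it.
            proc = subprocess.run(
                ["systemctl", "is-active", unit],
                capture_output=True, text=True,
            )
            states[unit] = proc.stdout.strip() or "unknown"
        return states

    if __name__ == "__main__":
        for unit, state in unit_states(UNITS).items():
            print(f"{unit}: {state}")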
May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 226. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 204. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 208. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 203. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[809]: Server listening on 0.0.0.0 port 22. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[809]: Server listening on :: port 22. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 237. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com restraintd[812]: Listening on http://localhost:8081 May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sm-notify[806]: Version 2.5.4 starting May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 214. May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com crond[813]: (CRON) STARTUP (1.5.7) May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com crond[813]: (CRON) INFO (Syslog will be used instead of sendmail.) May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com crond[813]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 2% if used.) May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com crond[813]: (CRON) INFO (running with inotify support) May 24 06:20:13 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[865]: Cloud-init v. 24.4-5.el9 running 'modules:config' at Sat, 24 May 2025 10:20:13 +0000. Up 25.90 seconds. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[809]: Received signal 15; terminating. 
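[Editor's note] sshd comes up listening on 0.0.0.0:22 and [::]:22 and is then told to terminate (it is restarted a moment later during the cloud-init config stage). A self-contained check, independent of the log, that the listener is actually reachable:

    import socket

    def ssh_port_open(host="127.0.0.1", port=22, timeout=5.0):
        """Return True if a TCP connection to host:port succeeds within the timeout."""
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False

    if __name__ == "__main__":
        print("sshd reachable:", ssh_port_open())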
May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 488. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 488 and the job result is done. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 488. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com rsyslogd[808]: [origin software="rsyslogd" swVersion="8.2412.0-2.el9" x-pid="808" x-info="https://www.rsyslog.com"] start May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 231. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 116. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 229. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 229. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[868]: Server listening on 0.0.0.0 port 22. 
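[Editor's note] The sshd stop/start pair above, like every entry in this dump, carries the "░░ Subject / Defined-By / Support" explanation blocks that journalctl adds in -x mode. A sketch of pulling the same kind of slice for a single unit; this is an assumed invocation for illustration, not necessarily the exact command the test harness used:

    import subprocess

    def unit_journal(unit="sshd.service", lines=50):
        """Fetch recent journal entries for one unit, including -x explanation blocks."""
        return subprocess.run(
            ["journalctl", "-x", "-u", unit, "-n", str(lines), "--no-pager"],
            capture_output=True, text=True, check=True,
        ).stdout

    if __name__ == "__main__":
        print(unit_journal())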
May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[868]: Server listening on :: port 22. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 488. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com rsyslogd[808]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ] May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Config Stage. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 246. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Final Stage... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 249. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com kdumpctl[819]: kdump: Detected change(s) in the following file(s): /etc/fstab May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1103]: Cloud-init v. 24.4-5.el9 running 'modules:final' at Sat, 24 May 2025 10:20:14 +0000. Up 26.43 seconds. May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1105]: ############################################################# May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1106]: -----BEGIN SSH HOST KEY FINGERPRINTS----- May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1108]: 256 SHA256:vCyk4OwEOoPBYUTrGPC6TbclSBJ0r4BKav+ppSZHwzE root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com (ECDSA) May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1110]: 256 SHA256:nA53QvH9NzgkUwyzLciPpUJ+f4k5zO6uG9D32cZAuuU root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com (ED25519) May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1112]: 3072 SHA256:2wMKRwzFQs89cnHGUEDOIPFa6kZG9glotCIPVZk1l5M root@ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com (RSA) May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1113]: -----END SSH HOST KEY FINGERPRINTS----- May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1114]: ############################################################# May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com cloud-init[1103]: Cloud-init v. 24.4-5.el9 finished at Sat, 24 May 2025 10:20:14 +0000. Datasource DataSourceEc2Local. Up 26.54 seconds May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Final Stage. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 249. 
May 24 06:20:14 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 244. May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 0 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 0 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 48 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 48 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 49 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 49 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 50 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 50 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 51 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 51 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 52 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 52 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 53 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 53 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 54 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 54 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 55 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 55 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 56 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 56 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 57 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 57 affinity is now unmanaged May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 58 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 58 affinity is now unmanaged May 24 06:20:15 
ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: Cannot change IRQ 59 affinity: Operation not permitted May 24 06:20:15 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com irqbalance[601]: IRQ 59 affinity is now unmanaged May 24 06:20:18 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: Selected source 96.231.54.40 (2.centos.pool.ntp.org) May 24 06:20:18 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com chronyd[612]: System clock TAI offset set to 37 seconds May 24 06:20:20 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. May 24 06:20:20 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com kdumpctl[819]: kdump: Rebuilding /boot/initramfs-5.14.0-583.el9.x86_64kdump.img May 24 06:20:21 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1450]: dracut-057-87.git20250311.el9 May 24 06:20:21 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Executing: /usr/bin/dracut --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --mount "/dev/disk/by-uuid/5de771f1-9acb-4b75-bf56-ae0a7779ba73 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device --add-confdir /lib/kdump/dracut.conf.d -f /boot/initramfs-5.14.0-583.el9.x86_64kdump.img 5.14.0-583.el9.x86_64 May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! 
May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Module 'plymouth' will not be installed, because it's in the list to be omitted! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! May 24 06:20:22 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! 
May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Module 'resume' will not be installed, because it's in the list to be omitted! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: memstrack is not available May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! 
May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! 
May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: memstrack is not available May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: systemd *** May 24 06:20:23 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: fips *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: systemd-initrd *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: rngd *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: i18n *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: drm *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: prefixdevname *** May 24 06:20:24 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: kernel-modules *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: kernel-modules-extra *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: fstab-sys *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: rootfs-block *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: terminfo *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: udev-rules *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Skipping udev rule: 91-permissions.rules May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Skipping udev rule: 80-drivers-modprobe.rules May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: dracut-systemd *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: usrmount *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: base *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: fs-lib *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: kdumpbase *** May 24 06:20:25 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: microcode_ctl-fw_dir_override *** May 24 06:20:25 
ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl module: mangling fw_dir May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: intel: caveats check for kernel version "5.14.0-583.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-2d-07" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-4e-03" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-4f-01" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-55-04" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-5e-03" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-8c-01" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: openssl *** May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: shutdown *** May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including module: squash *** May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Including modules done *** May 24 06:20:26 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Installing kernel module dependencies *** May 24 06:20:27 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Installing kernel module dependencies done *** May 24 06:20:27 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Resolving executable dependencies *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Resolving executable dependencies done *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Hardlinking files *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Mode: real May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Files: 449 May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Linked: 2 files May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Compared: 0 xattrs May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Compared: 12 files May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Saved: 56.21 KiB May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Duration: 0.007000 seconds May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Hardlinking files done *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Generating early-microcode cpio image *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Constructing GenuineIntel.bin *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Constructing GenuineIntel.bin *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Store current command line parameters *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: Stored kernel commandline: May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: No dracut internal kernel commandline stored in the initramfs May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Install squash loader *** May 24 06:20:29 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Squashing the files inside the initramfs *** May 24 06:20:35 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Squashing the files inside the initramfs done *** May 24 06:20:35 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Creating image file 
'/boot/initramfs-5.14.0-583.el9.x86_64kdump.img' *** May 24 06:20:35 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com dracut[1452]: *** Creating initramfs image file '/boot/initramfs-5.14.0-583.el9.x86_64kdump.img' done *** May 24 06:20:36 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com kdumpctl[819]: kdump: kexec: loaded kdump kernel May 24 06:20:36 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com kdumpctl[819]: kdump: Starting kdump: [OK] May 24 06:20:36 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 243. May 24 06:20:36 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.136s (kernel) + 5.341s (initrd) + 42.292s (userspace) = 48.769s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1136500 microseconds. ░░ ░░ Initrd start-up required 5341109 microseconds. ░░ ░░ Userspace start-up required 42292122 microseconds. May 24 06:20:41 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4247]: Accepted publickey for root from 10.30.32.164 port 55144 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 490. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 556. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4247. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 556. 
May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 489. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Queued start job for default target Main User Target. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 11. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 3. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. 
May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Reached target Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[4251]: Startup finished in 75ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 75195 microseconds. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 489. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 557. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4247]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4260]: Received disconnect from 10.30.32.164 port 55144:11: disconnected by user May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4260]: Disconnected from user root 10.30.32.164 port 55144 May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4247]: pam_unix(sshd:session): session closed for user root May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: Session 1 logged out. Waiting for processes to exit. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. May 24 06:21:45 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4297]: Accepted publickey for root from 10.31.10.207 port 57084 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4296]: Accepted publickey for root from 10.31.10.207 port 57070 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4297. May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 626. May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4296. May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 695. May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4297]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) May 24 06:21:48 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4296]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4302]: Received disconnect from 10.31.10.207 port 57084:11: disconnected by user May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4302]: Disconnected from user root 10.31.10.207 port 57084 May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com sshd[4297]: pam_unix(sshd:session): session closed for user root May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: Session 3 logged out. 
Waiting for processes to exit. May 24 06:21:49 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd-logind[603]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated. May 24 06:22:52 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com unknown: Running test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1) with reboot count 0 and test restart count 0. (Be aware the test name is sanitized!) May 24 06:22:52 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 765. May 24 06:22:52 ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 765. May 24 06:22:52 managed-node2 systemd-hostnamed[6137]: Hostname set to (static) May 24 06:22:52 managed-node2 NetworkManager[639]: [1748082172.7838] hostname: static hostname changed from "ip-10-31-14-202.testing-farm.us-east-1.aws.redhat.com" to "managed-node2" May 24 06:22:52 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 831. May 24 06:22:52 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 831. May 24 06:22:53 managed-node2 unknown: Leaving test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1). (Be aware the test name is sanitized!) May 24 06:23:02 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. May 24 06:23:22 managed-node2 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. May 24 06:23:50 managed-node2 sshd[6834]: Accepted publickey for root from 10.31.44.184 port 51444 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE May 24 06:23:50 managed-node2 systemd-logind[603]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. 
░░ ░░ The leading process of the session is 6834. May 24 06:23:50 managed-node2 systemd[1]: Started Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 897. May 24 06:23:50 managed-node2 sshd[6834]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) May 24 06:23:51 managed-node2 python3.9[7011]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d May 24 06:23:52 managed-node2 python3.9[7186]: ansible-service_facts Invoked May 24 06:23:55 managed-node2 python3.9[7418]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 May 24 06:23:56 managed-node2 python3.9[7567]: ansible-ansible.legacy.dnf Invoked with name=['pcp-pmda-bpftrace', 'bpftrace'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None May 24 06:24:01 managed-node2 python3.9[7749]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 18 root root 235 May 14 07:11 ../ drwxr-xr-x. 2 root root 60 May 24 06:19 tmpfiles.d/ drwxr-xr-x. 3 root root 60 May 24 06:19 log/ drwxr-xr-x. 2 root root 40 May 24 06:19 mount/ drwxr-xr-x. 4 root root 100 May 24 06:19 initramfs/ -r--r--r--. 1 root root 33 May 24 06:19 machine-id srw-rw-rw-. 1 root root 0 May 24 06:20 rpcbind.sock= prw-------. 1 root root 0 May 24 06:20 initctl| drwxr-xr-x. 5 root root 100 May 24 06:20 credentials/ drwx------. 2 root root 40 May 24 06:20 cryptsetup/ drwxr-xr-x. 2 root root 40 May 24 06:20 setrans/ drwxr-xr-x. 2 root root 40 May 24 06:20 sepermit/ drwxr-xr-x. 2 root root 40 May 24 06:20 faillock/ drwxr-xr-x. 2 root root 40 May 24 06:20 console/ drwxr-xr-x. 2 root root 40 May 24 06:20 motd.d/ drwx--x--x. 3 root root 60 May 24 06:20 sudo/ -rw-r--r--. 1 root root 0 May 24 06:20 motd drwxr-xr-x. 3 root root 60 May 24 06:20 tpm2-tss/ drwx------. 2 rpc rpc 60 May 24 06:20 rpcbind/ -rw-r--r--. 1 root root 4 May 24 06:20 auditd.pid drwxr-xr-x. 2 root root 60 May 24 06:20 dbus/ srw-rw-rw-. 1 root root 0 May 24 06:20 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 May 24 06:20 irqbalance/ -rw-r--r--. 1 root root 4 May 24 06:20 dhclient.pid -rw-r--r--. 1 root root 632 May 24 06:20 dhclient.lease -rw-------. 1 root root 4 May 24 06:20 gssproxy.pid srw-rw-rw-. 
1 root root 0 May 24 06:20 gssproxy.sock= drwxr-xr-x. 2 root root 60 May 24 06:20 chrony-dhcp/ drwxr-x---. 2 chrony chrony 80 May 24 06:20 chrony/ drwxr-xr-x. 3 root root 80 May 24 06:20 lock/ -rw-------. 1 root root 4 May 24 06:20 sm-notify.pid -rw-r--r--. 1 root root 4 May 24 06:20 crond.pid ----------. 1 root root 0 May 24 06:20 cron.reboot -rw-------. 1 root root 3 May 24 06:20 rsyslogd.pid -rw-r--r--. 1 root root 4 May 24 06:20 sshd.pid drwxr-xr-x. 3 root root 360 May 24 06:20 cloud-init/ -rw-------. 1 root root 0 May 24 06:20 agetty.reload drwxr-xr-x. 27 root root 880 May 24 06:20 ./ drwxr-xr-x. 2 root root 80 May 24 06:20 blkid/ drwxr-xr-x. 21 root root 540 May 24 06:21 systemd/ drwxr-xr-x. 3 root root 60 May 24 06:21 user/ drwxr-xr-x. 7 root root 160 May 24 06:22 udev/ drwxr-xr-x. 6 root root 160 May 24 06:22 NetworkManager/ -rw-rw-r--. 1 root utmp 1920 May 24 06:24 utmp ERROR - /run/pcp does not exist TASK [Reraise error] *********************************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 Saturday 24 May 2025 06:24:01 -0400 (0:00:00.508) 0:00:11.175 ********** fatal: [managed-node2]: FAILED! => { "changed": false } MSG: {'results': [], 'rc': 1, 'failed': True, 'msg': "Failed to download metadata for repo 'highavailability': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried", 'invocation': {'module_args': {'name': ['pcp-pmda-bpftrace', 'bpftrace'], 'state': 'present', 'allow_downgrade': False, 'allowerasing': False, 'autoremove': False, 'bugfix': False, 'cacheonly': False, 'disable_gpg_check': False, 'disable_plugin': [], 'disablerepo': [], 'download_only': False, 'enable_plugin': [], 'enablerepo': [], 'exclude': [], 'installroot': '/', 'install_repoquery': True, 'install_weak_deps': True, 'security': False, 'skip_broken': False, 'update_cache': False, 'update_only': False, 'validate_certs': True, 'sslverify': True, 'lock_timeout': 30, 'use_backend': 'auto', 'best': None, 'conf_file': None, 'disable_excludes': None, 'download_dir': None, 'list': None, 'nobest': None, 'releasever': None}}, '_ansible_no_log': False, 'changed': False} TASK [Get final state of services] ********************************************* task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Saturday 24 May 2025 06:24:01 -0400 (0:00:00.020) 0:00:11.196 ********** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": 
"alias" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": 
{ "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": 
"modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": 
{ "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Saturday 24 May 2025 06:24:03 -0400 (0:00:02.507) 0:00:13.704 ********** skipping: [managed-node2] => (item=pmcd) => { "ansible_loop_var": "item", 
"changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "pmcd", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "pmlogger", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "pmie", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "pmproxy", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [Stop firewall] *********************************************************** task path: /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29 Saturday 24 May 2025 06:24:03 -0400 (0:00:00.052) 0:00:13.756 ********** skipping: [managed-node2] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } PLAY RECAP ********************************************************************* managed-node2 : ok=15 changed=0 unreachable=0 failed=1 skipped=10 rescued=1 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "end_time": "2025-05-24T10:24:00.605079+00:00Z", "host": "managed-node2", "message": "Failed to download metadata for repo 'highavailability': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried", "rc": 1, "start_time": "2025-05-24T10:23:55.931183+00:00Z", "task_name": "Install needed bpftrace metrics packages", "task_path": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41" }, { "ansible_version": "2.17.12", "end_time": "2025-05-24T10:24:01.161702+00:00Z", "host": "managed-node2", "message": { "_ansible_no_log": false, "changed": false, "failed": true, "invocation": { "module_args": { "allow_downgrade": false, "allowerasing": false, "autoremove": false, "best": null, "bugfix": false, "cacheonly": false, "conf_file": null, "disable_excludes": null, "disable_gpg_check": false, "disable_plugin": [], "disablerepo": [], "download_dir": null, "download_only": false, "enable_plugin": [], "enablerepo": [], "exclude": [], "install_repoquery": true, "install_weak_deps": true, "installroot": "/", "list": null, "lock_timeout": 30, "name": [ "pcp-pmda-bpftrace", "bpftrace" ], "nobest": null, "releasever": 
null, "security": false, "skip_broken": false, "sslverify": true, "state": "present", "update_cache": false, "update_only": false, "use_backend": "auto", "validate_certs": true } }, "msg": "Failed to download metadata for repo 'highavailability': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried", "rc": 1, "results": [] }, "start_time": "2025-05-24T10:24:01.147845+00:00Z", "task_name": "Reraise error", "task_path": "/tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Saturday 24 May 2025 06:24:03 -0400 (0:00:00.010) 0:00:13.766 ********** =============================================================================== fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages --- 4.68s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41 Get initial state of services ------------------------------------------- 2.64s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Get final state of services --------------------------------------------- 2.51s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Gathering Facts --------------------------------------------------------- 2.47s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:9 Collect logs ------------------------------------------------------------ 0.51s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Check if system is ostree --- 0.43s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:18 Restore state of services ----------------------------------------------- 0.05s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set platform/version specific variables --- 0.05s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:4 Setup bpftrace metrics. 
------------------------------------------------- 0.04s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:78 Run the role ------------------------------------------------------------ 0.03s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:27 Handle test failure ----------------------------------------------------- 0.03s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:47 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set flag to indicate system is ostree --- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:23 Stop test --------------------------------------------------------------- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:17 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace package names --- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:27 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace metrics package names --- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:34 fedora.linux_system_roles.metrics : Setup metrics access for roles ------ 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list --- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Reraise error ----------------------------------------------------------- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 fedora.linux_system_roles.metrics : Ensure ansible_facts used by role --- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Include vault variables ------------------------------------------------- 0.02s /tmp/collections-MIG/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_bz1855544.yml:5
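
The reported failure ("Install needed bpftrace metrics packages", private_metrics_subrole_bpftrace/tasks/main.yml:41) is a repository metadata download error for the 'highavailability' repo, not a defect in the task itself. Below is a minimal sketch, assuming the ansible.builtin.dnf module, of the kind of task the reported module_args correspond to; only the package names and state come from the error block above, and the real role task may differ.

# Hedged sketch: a package task consistent with the reported module_args.
# ansible.builtin.dnf is an assumption; package names and state are from the log.
- name: Install needed bpftrace metrics packages
  ansible.builtin.dnf:
    name:
      - pcp-pmda-bpftrace
      - bpftrace
    state: present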
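
For the "Get final state of services" / "Restore state of services" skips above: every item was skipped because none of the listed services appeared in final_state.ansible_facts.services. A minimal sketch of tasks that could produce that output follows; the loop items and the when condition are taken from the log, while the use of ansible.builtin.service_facts, ansible.builtin.service, and the initial_state/final_state variable names are assumptions.

# Hedged sketch: restore each test service to its pre-test state, but only
# if the service unit still exists on the host after the role run.
- name: Get final state of services
  ansible.builtin.service_facts:
  register: final_state

- name: Restore state of services
  ansible.builtin.service:
    name: "{{ item }}"
    state: "{{ 'started' if initial_state.ansible_facts.services[item + '.service'].state == 'running' else 'stopped' }}"
  loop:
    - pmcd
    - pmlogger
    - pmie
    - pmproxy
    - redis
    - valkey
    - grafana-server
  when: item + '.service' in final_state.ansible_facts.services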