ansible-playbook [core 2.17.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-O8Y
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.11 (main, Aug 14 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-11)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_default.yml ****************************************************
1 plays in /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_default.yml

PLAY [Ensure that the role runs with default parameters] ***********************

TASK [Run the role] ************************************************************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_default.yml:19
Wednesday 17 September 2025 11:06:18 -0400 (0:00:00.044) 0:00:00.044 ***
included: fedora.linux_system_roles.hpc for managed-node1

TASK [fedora.linux_system_roles.hpc : Set platform/version specific variables] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:3
Wednesday 17 September 2025 11:06:18 -0400 (0:00:00.060) 0:00:00.104 ***
included: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.hpc : Ensure ansible_facts used by role] *******
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:2
Wednesday 17 September 2025 11:06:18 -0400 (0:00:00.016) 0:00:00.121 ***
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]

TASK [fedora.linux_system_roles.hpc : Check if system is ostree] ***************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:10
Wednesday 17 September 2025 11:06:19 -0400 (0:00:01.025) 0:00:01.146 ***
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.hpc : Set flag to indicate system is ostree] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:15
Wednesday 17 September 2025 11:06:20 -0400 (0:00:00.431) 0:00:01.578 ***
ok: [managed-node1] => {
    "ansible_facts": {
        "__hpc_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.hpc : Set platform/version specific variables] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:19
Wednesday 17 September 2025 11:06:20 -0400 (0:00:00.021) 0:00:01.600 ***
skipping: [managed-node1] => (item=RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "__template_packages": [],
        "__template_services": []
    },
    "ansible_included_var_files": [
        "/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node1] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "__template_packages": [],
        "__template_services": []
    },
    "ansible_included_var_files": [
        "/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.hpc : Deploy the GPG key for RHEL EPEL repository] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:6
Wednesday 17 September 2025 11:06:20 -0400 (0:00:00.043) 0:00:01.643 ***
ok: [managed-node1] => {
    "changed": false
}

TASK [fedora.linux_system_roles.hpc : Install EPEL release package] ************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:12
Wednesday 17 September 2025 11:06:21 -0400 (0:00:01.069) 0:00:02.713 ***
ok: [managed-node1] => {
    "changed": false,
    "rc": 0,
    "results": [
        "Installed /root/.ansible/tmp/ansible-tmp-1758121581.314758-8282-107318434937515/epel-release-latest-9.noarchseue9lqv.rpm"
    ]
}
MSG: Nothing to do

TASK [fedora.linux_system_roles.hpc : Deploy the GPG key for NVIDIA repositories] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:18
Wednesday 17 September 2025 11:06:22 -0400 (0:00:01.505) 0:00:04.218 ***
changed: [managed-node1] => {
    "changed": true
}

TASK [fedora.linux_system_roles.hpc : Configure the NVIDIA CUDA repository] ****
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:23
Wednesday 17 September 2025 11:06:23 -0400 (0:00:00.525) 0:00:04.743 ***
redirecting (type: action) ansible.builtin.yum to ansible.builtin.dnf
changed: [managed-node1] => {
    "changed": true,
    "repo": "nvidia-cuda",
    "state": "present"
}

TASK [fedora.linux_system_roles.hpc : Configure the RHUI repository] ***********
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:30
Wednesday 17 September 2025 11:06:23 -0400 (0:00:00.432) 0:00:05.176 ***
redirecting (type: action) ansible.builtin.yum to ansible.builtin.dnf
changed: [managed-node1] => {
    "changed": true,
    "repo": "rhel-9-for-x86_64-appstream-rhui-rpms",
    "state": "present"
}

TASK [fedora.linux_system_roles.hpc : Install lvm2 to get lvs command] *********
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:43
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.347) 0:00:05.524 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "hpc_manage_storage",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.hpc : Get current LV size of rootlv] ***********
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:49
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.015) 0:00:05.539 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "hpc_manage_storage",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.hpc : Get current LV size of usrlv] ************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:56
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.013) 0:00:05.553 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "hpc_manage_storage",
    "skip_reason": "Conditional result was False"
}

TASK [Configure storage] *******************************************************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:63
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.011) 0:00:05.564 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "hpc_manage_storage",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.hpc : Update RHUI packages from Microsoft repositories] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:91
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.011) 0:00:05.576 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "ansible_system_vendor == \"Microsoft Corporation\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.hpc : Force install kernel] ********************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:99
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.016) 0:00:05.587 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__hpc_force_kernel_version is not none",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.hpc : Explicitly install kernel-devel and kernel-headers packages matching the currently running kernel] ***
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:106
Wednesday 17 September 2025 11:06:24 -0400 (0:00:00.016) 0:00:05.603 ***
fatal: [managed-node1]: FAILED! => {
    "changed": false,
    "rc": 1,
    "results": []
}
MSG: Failed to download metadata for repo 'rhel-9-for-x86_64-appstream-rhui-rpms': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried

TASK [Cleanup] *****************************************************************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_default.yml:23
Wednesday 17 September 2025 11:06:26 -0400 (0:00:02.024) 0:00:07.628 ***
included: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml for managed-node1

TASK [Check if versionlock entries exist] **************************************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:3
Wednesday 17 September 2025 11:06:26 -0400 (0:00:00.023) 0:00:07.652 ***
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Clear dnf versionlock entries] *******************************************
task path: /tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:8
Wednesday 17 September 2025 11:06:26 -0400 (0:00:00.344) 0:00:07.996 ***
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__hpc_versionlock_stat.stat.exists",
    "skip_reason": "Conditional result was False"
}

PLAY RECAP *********************************************************************
managed-node1 : ok=13 changed=3 unreachable=0 failed=1 skipped=7 rescued=0 ignored=0

SYSTEM ROLES ERRORS BEGIN v1
[
    {
        "ansible_version": "2.17.14",
        "end_time": "2025-09-17T15:06:26.167470+00:00Z",
        "host": "managed-node1",
        "message": "Failed to download metadata for repo 'rhel-9-for-x86_64-appstream-rhui-rpms': Cannot download repomd.xml: Cannot download repodata/repomd.xml: All mirrors were tried",
        "rc": 1,
        "start_time": "2025-09-17T15:06:24.146260+00:00Z",
        "task_name": "Explicitly install kernel-devel and kernel-headers packages matching the currently running kernel",
        "task_path": "/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:106"
    }
]
SYSTEM ROLES ERRORS END v1

TASKS RECAP ********************************************************************
Wednesday 17 September 2025 11:06:26 -0400 (0:00:00.011) 0:00:08.007 ***
===============================================================================
fedora.linux_system_roles.hpc : Explicitly install kernel-devel and kernel-headers packages matching the currently running kernel --- 2.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:106
fedora.linux_system_roles.hpc : Install EPEL release package ------------ 1.51s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:12
fedora.linux_system_roles.hpc : Deploy the GPG key for RHEL EPEL repository --- 1.07s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:6
fedora.linux_system_roles.hpc : Ensure ansible_facts used by role ------- 1.03s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:2
fedora.linux_system_roles.hpc : Deploy the GPG key for NVIDIA repositories --- 0.53s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:18
fedora.linux_system_roles.hpc : Configure the NVIDIA CUDA repository ---- 0.43s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:23
fedora.linux_system_roles.hpc : Check if system is ostree --------------- 0.43s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:10
fedora.linux_system_roles.hpc : Configure the RHUI repository ----------- 0.35s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:30
Check if versionlock entries exist -------------------------------------- 0.34s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:3
Run the role ------------------------------------------------------------ 0.06s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_default.yml:19
fedora.linux_system_roles.hpc : Set platform/version specific variables --- 0.04s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:19
Cleanup ----------------------------------------------------------------- 0.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_default.yml:23
fedora.linux_system_roles.hpc : Set flag to indicate system is ostree --- 0.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:15
fedora.linux_system_roles.hpc : Set platform/version specific variables --- 0.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:3
fedora.linux_system_roles.hpc : Force install kernel -------------------- 0.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:99
fedora.linux_system_roles.hpc : Install lvm2 to get lvs command --------- 0.02s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:43
fedora.linux_system_roles.hpc : Get current LV size of rootlv ----------- 0.01s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:49
Configure storage ------------------------------------------------------- 0.01s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:63
fedora.linux_system_roles.hpc : Update RHUI packages from Microsoft repositories --- 0.01s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:91
Clear dnf versionlock entries ------------------------------------------- 0.01s
/tmp/collections-O8Y/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:8

Sep 17 11:06:19 managed-node1 python3.9[8382]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family', 'devices'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Sep 17 11:06:20 managed-node1 python3.9[8546]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Sep 17 11:06:20 managed-node1 python3.9[8695]: ansible-rpm_key Invoked with key=https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-9 state=present validate_certs=True fingerprint=None
Sep 17 11:06:21 managed-node1 python3.9[8849]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Sep 17 11:06:22 managed-node1 python3.9[8926]: ansible-ansible.legacy.dnf Invoked with name=['https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Sep 17 11:06:23 managed-node1 python3.9[9076]: ansible-rpm_key Invoked with key=https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub state=present validate_certs=True fingerprint=None
Sep 17 11:06:23 managed-node1 python3.9[9231]: ansible-yum_repository Invoked with name=nvidia-cuda description=NVIDIA CUDA repository baseurl=['https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64'] gpgcheck=True reposdir=/etc/yum.repos.d state=present unsafe_writes=False bandwidth=None cost=None deltarpm_metadata_percentage=None deltarpm_percentage=None enabled=None enablegroups=None exclude=None failovermethod=None file=None gpgcakey=None gpgkey=None module_hotfixes=None http_caching=None include=None includepkgs=None ip_resolve=None keepalive=None keepcache=None metadata_expire=None metadata_expire_filter=None metalink=None mirrorlist=None mirrorlist_expire=None password=NOT_LOGGING_PARAMETER priority=None protect=None proxy=None proxy_password=NOT_LOGGING_PARAMETER proxy_username=None repo_gpgcheck=None retries=None s3_enabled=None skip_if_unavailable=None sslcacert=None ssl_check_cert_permissions=None sslclientcert=None sslclientkey=None sslverify=None throttle=None timeout=None ui_repoid_vars=None username=None async=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Sep 17 11:06:24 managed-node1 python3.9[9380]: ansible-yum_repository Invoked with name=rhel-9-for-x86_64-appstream-rhui-rpms description=Red Hat Enterprise Linux 9 for x86_64 - AppStream from RHUI (RPMs) baseurl=['https://rhui4-1.microsoft.com/pulp/repos/content/dist/rhel9/rhui/$releasever/x86_64/appstream/os'] gpgcheck=True gpgkey=['file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release'] reposdir=/etc/yum.repos.d state=present unsafe_writes=False bandwidth=None cost=None deltarpm_metadata_percentage=None deltarpm_percentage=None enabled=None enablegroups=None exclude=None failovermethod=None file=None gpgcakey=None module_hotfixes=None http_caching=None include=None includepkgs=None ip_resolve=None keepalive=None keepcache=None metadata_expire=None metadata_expire_filter=None metalink=None mirrorlist=None mirrorlist_expire=None password=NOT_LOGGING_PARAMETER priority=None protect=None proxy=None proxy_password=NOT_LOGGING_PARAMETER proxy_username=None repo_gpgcheck=None retries=None s3_enabled=None skip_if_unavailable=None sslcacert=None ssl_check_cert_permissions=None sslclientcert=None sslclientkey=None sslverify=None throttle=None timeout=None ui_repoid_vars=None username=None async=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Sep 17 11:06:24 managed-node1 python3.9[9529]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Sep 17 11:06:24 managed-node1 python3.9[9606]: ansible-ansible.legacy.dnf Invoked with name=['kernel-devel-5.14.0-612.el9.x86_64', 'kernel-headers-5.14.0-612.el9.x86_64'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Sep 17 11:06:26 managed-node1 python3.9[9761]: ansible-stat Invoked with path=/etc/dnf/plugins/versionlock.list follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Sep 17 11:06:26 managed-node1 sshd[9786]: Accepted publickey for root from 10.31.15.125 port 37440 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Sep 17 11:06:26 managed-node1 systemd-logind[609]: New session 14 of user root.
░░ Subject: A new session 14 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 14 has been created for the user root.
░░
░░ The leading process of the session is 9786.
Sep 17 11:06:26 managed-node1 systemd[1]: Started Session 14 of User root.
░░ Subject: A start job for unit session-14.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-14.scope has finished successfully.
░░
░░ The job identifier is 1660.
Sep 17 11:06:26 managed-node1 sshd[9786]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Sep 17 11:06:26 managed-node1 sshd[9789]: Received disconnect from 10.31.15.125 port 37440:11: disconnected by user
Sep 17 11:06:26 managed-node1 sshd[9789]: Disconnected from user root 10.31.15.125 port 37440
Sep 17 11:06:26 managed-node1 sshd[9786]: pam_unix(sshd:session): session closed for user root
Sep 17 11:06:26 managed-node1 systemd[1]: session-14.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-14.scope has successfully entered the 'dead' state.
Sep 17 11:06:26 managed-node1 systemd-logind[609]: Session 14 logged out. Waiting for processes to exit.
Sep 17 11:06:26 managed-node1 systemd-logind[609]: Removed session 14.
░░ Subject: Session 14 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A session with the ID 14 has been terminated.
Sep 17 11:06:26 managed-node1 sshd[9814]: Accepted publickey for root from 10.31.15.125 port 37452 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Sep 17 11:06:26 managed-node1 systemd-logind[609]: New session 15 of user root.
░░ Subject: A new session 15 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 15 has been created for the user root.
░░
░░ The leading process of the session is 9814.
Sep 17 11:06:26 managed-node1 systemd[1]: Started Session 15 of User root.
░░ Subject: A start job for unit session-15.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-15.scope has finished successfully.
░░
░░ The job identifier is 1729.
Sep 17 11:06:26 managed-node1 sshd[9814]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)