ansible-playbook [core 2.17.4]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-PvV
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.5 (main, Aug 23 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_verify_fullstack.yml *******************************************
2 plays in /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:5
Wednesday 25 September 2024 06:41:27 -0400 (0:00:00.008) 0:00:00.008 ***
ok: [managed-node3] => {
    "ansible_facts": {
        "pcptest_pw": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/metrics-bXQ/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Test the full PCP and Grafana stack] *************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:9
Wednesday 25 September 2024 06:41:27 -0400 (0:00:00.021) 0:00:00.030 ***
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node3] TASK [Stop test] *************************************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:18 Wednesday 25 September 2024 06:41:28 -0400 (0:00:01.014) 0:00:01.045 *** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [Get initial state of services] ******************************************* task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Wednesday 25 September 2024 06:41:28 -0400 (0:00:00.038) 0:00:01.083 *** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": 
"systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] 
************************************************************ task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:28 Wednesday 25 September 2024 06:41:29 -0400 (0:00:01.586) 0:00:02.669 *** included: fedora.linux_system_roles.metrics for managed-node3 TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Wednesday 25 September 2024 06:41:29 -0400 (0:00:00.034) 0:00:02.704 *** skipping: [managed-node3] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.020) 0:00:02.724 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.017) 0:00:02.741 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.021) 0:00:02.763 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.017) 0:00:02.781 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.017) 0:00:02.798 *** ok: [managed-node3] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.028) 0:00:02.827 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] 
******************************************* task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.027) 0:00:02.854 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.026) 0:00:02.881 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] ************************************************* task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.026) 0:00:02.907 *** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric querying service.] ****************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.017) 0:00:02.924 *** included: fedora.linux_system_roles.private_metrics_subrole_keyserver for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Set platform/version specific variables] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:4 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.044) 0:00:02.969 *** ok: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml) => { "ansible_facts": { "__keyserver_conf_link": "/etc" }, "ansible_included_var_files": [ "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml) => { "ansible_facts": { "__keyserver_packages": [ "valkey" ] }, "ansible_included_var_files": [ "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => 
(item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml) => { "ansible_facts": { "__keyserver_conf_file": "redis.conf", "__keyserver_conf_path": "/etc/redis", "__keyserver_loaded_modules": [], "__keyserver_name": "redis", "__keyserver_packages_extra": [] }, "ansible_included_var_files": [ "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml" } skipping: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml) => { "ansible_facts": { "__keyserver_conf_file": "redis.conf", "__keyserver_conf_path": "/etc/redis", "__keyserver_loaded_modules": [], "__keyserver_name": "redis", "__keyserver_packages_extra": [] }, "ansible_included_var_files": [ "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml" } skipping: [managed-node3] => (item=/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Check if system is ostree] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:22 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.071) 0:00:03.040 *** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK 
[fedora.linux_system_roles.private_metrics_subrole_keyserver : Set flag to indicate system is ostree] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:27 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.399) 0:00:03.440 *** ok: [managed-node3] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Install key server packages] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:31 Wednesday 25 September 2024 06:41:30 -0400 (0:00:00.024) 0:00:03.464 *** changed: [managed-node3] => { "changed": true, "rc": 0, "results": [ "Installed: valkey-7.2.6-1.el9.x86_64" ] } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server configuration directory exists] *** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:37 Wednesday 25 September 2024 06:41:33 -0400 (0:00:02.537) 0:00:06.001 *** fatal: [managed-node3]: FAILED! => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/redis", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } MSG: chown failed: failed to look up user redis TASK [Handle failure case] ***************************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:49 Wednesday 25 September 2024 06:41:33 -0400 (0:00:00.428) 0:00:06.430 *** included: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node3 TASK [Collect logs] ************************************************************ task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Wednesday 25 September 2024 06:41:33 -0400 (0:00:00.027) 0:00:06.457 *** ok: [managed-node3] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.045738", "end": "2024-09-25 06:41:34.131000", "rc": 0, "start": "2024-09-25 06:41:34.085262" } STDOUT: Sep 25 06:34:39 localhost augenrules[570]: enabled 1 Sep 25 06:34:39 localhost augenrules[570]: failure 1 Sep 25 06:34:39 localhost augenrules[570]: pid 544 Sep 25 06:34:39 localhost augenrules[570]: rate_limit 0 Sep 25 06:34:39 localhost augenrules[570]: backlog_limit 8192 Sep 25 06:34:39 localhost augenrules[570]: lost 0 Sep 25 06:34:39 localhost augenrules[570]: backlog 4 Sep 25 06:34:39 localhost augenrules[570]: backlog_wait_time 60000 Sep 25 06:34:39 localhost augenrules[570]: backlog_wait_time_actual 0 Sep 25 06:34:39 localhost augenrules[570]: enabled 1 Sep 25 06:34:39 localhost augenrules[570]: failure 1 Sep 25 06:34:39 localhost augenrules[570]: pid 544 Sep 25 06:34:39 localhost augenrules[570]: rate_limit 0 Sep 25 06:34:39 localhost augenrules[570]: backlog_limit 8192 Sep 25 06:34:39 localhost augenrules[570]: lost 0 Sep 25 06:34:39 localhost augenrules[570]: backlog 4 Sep 25 06:34:39 localhost augenrules[570]: backlog_wait_time 60000 Sep 25 06:34:39 
localhost augenrules[570]: backlog_wait_time_actual 0 Sep 25 06:34:39 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 246. Sep 25 06:34:39 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 242. Sep 25 06:34:39 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 242. Sep 25 06:34:39 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 120. Sep 25 06:34:39 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 186. Sep 25 06:34:39 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 184. Sep 25 06:34:39 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 185. Sep 25 06:34:39 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 183. Sep 25 06:34:39 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 190. Sep 25 06:34:39 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 195. Sep 25 06:34:39 localhost systemd[1]: Reached target Socket Units. 
░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 193. Sep 25 06:34:39 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 191. Sep 25 06:34:39 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 153. Sep 25 06:34:40 localhost systemd[1]: Started D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 191. Sep 25 06:34:40 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 117. Sep 25 06:34:40 localhost dbus-broker-lau[578]: Ready Sep 25 06:34:40 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 208. Sep 25 06:34:40 localhost systemd[1]: Starting Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 215. Sep 25 06:34:40 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 129. Sep 25 06:34:40 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 240. Sep 25 06:34:40 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 182. 
Sep 25 06:34:40 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 233. Sep 25 06:34:40 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 221. Sep 25 06:34:40 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 222. Sep 25 06:34:40 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 219. Sep 25 06:34:40 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 218. Sep 25 06:34:40 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 238. Sep 25 06:34:40 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 239. Sep 25 06:34:40 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 235. Sep 25 06:34:40 localhost systemd[1]: Starting Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 267. Sep 25 06:34:40 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. 
░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 129. Sep 25 06:34:40 localhost /usr/sbin/irqbalance[583]: libcap-ng used by "/usr/sbin/irqbalance" failed dropping bounding set due to not having CAP_SETPCAP in capng_apply Sep 25 06:34:40 localhost systemd-logind[585]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Sep 25 06:34:40 localhost systemd-logind[585]: Watching system buttons on /dev/input/event0 (Power Button) Sep 25 06:34:40 localhost systemd-logind[585]: Watching system buttons on /dev/input/event1 (Sleep Button) Sep 25 06:34:40 localhost systemd-logind[585]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Sep 25 06:34:40 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 235. Sep 25 06:34:40 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Sep 25 06:34:40 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 267. Sep 25 06:34:41 localhost chronyd[592]: chronyd version 4.6 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Sep 25 06:34:41 localhost chronyd[592]: Loaded 0 symmetric keys Sep 25 06:34:41 localhost chronyd[592]: Using right/UTC timezone to obtain leap second data Sep 25 06:34:41 localhost chronyd[592]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Sep 25 06:34:41 localhost chronyd[592]: Loaded seccomp filter (level 2) Sep 25 06:34:41 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 208. Sep 25 06:34:41 localhost rngd[584]: Disabling 7: PKCS11 Entropy generator (pkcs11) Sep 25 06:34:41 localhost rngd[584]: Disabling 5: NIST Network Entropy Beacon (nist) Sep 25 06:34:41 localhost rngd[584]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Sep 25 06:34:41 localhost rngd[584]: Initializing available sources Sep 25 06:34:41 localhost rngd[584]: [hwrng ]: Initialization Failed Sep 25 06:34:41 localhost rngd[584]: [rdrand]: Enabling RDRAND rng support Sep 25 06:34:41 localhost rngd[584]: [rdrand]: Initialized Sep 25 06:34:41 localhost rngd[584]: [jitter]: JITTER timeout set to 5 sec Sep 25 06:34:41 localhost rngd[584]: [jitter]: Initializing AES buffer Sep 25 06:34:45 localhost cloud-init[598]: Cloud-init v. 23.4-19.el9 running 'init-local' at Wed, 25 Sep 2024 10:34:45 +0000. 
Up 26.41 seconds. Sep 25 06:34:46 localhost rngd[584]: [jitter]: Unable to obtain AES key, disabling JITTER source Sep 25 06:34:46 localhost rngd[584]: [jitter]: Initialization Failed Sep 25 06:34:46 localhost rngd[584]: [namedpipe]: Initialization Failed Sep 25 06:34:46 localhost rngd[584]: Process privileges have been dropped to 2:2 Sep 25 06:34:46 localhost dhclient[601]: Internet Systems Consortium DHCP Client 4.4.2b1 Sep 25 06:34:46 localhost dhclient[601]: Copyright 2004-2019 Internet Systems Consortium. Sep 25 06:34:46 localhost dhclient[601]: All rights reserved. Sep 25 06:34:46 localhost dhclient[601]: For info, please visit https://www.isc.org/software/dhcp/ Sep 25 06:34:46 localhost dhclient[601]: Sep 25 06:34:46 localhost dhclient[601]: Listening on LPF/eth0/0a:ff:d9:b3:7f:8d Sep 25 06:34:46 localhost dhclient[601]: Sending on LPF/eth0/0a:ff:d9:b3:7f:8d Sep 25 06:34:46 localhost dhclient[601]: Sending on Socket/fallback Sep 25 06:34:46 localhost dhclient[601]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 3 (xid=0xd9bc336d) Sep 25 06:34:46 localhost dhclient[601]: DHCPOFFER of 10.31.13.238 from 10.31.12.1 Sep 25 06:34:46 localhost dhclient[601]: DHCPREQUEST for 10.31.13.238 on eth0 to 255.255.255.255 port 67 (xid=0xd9bc336d) Sep 25 06:34:46 localhost dhclient[601]: DHCPACK of 10.31.13.238 from 10.31.12.1 (xid=0xd9bc336d) Sep 25 06:34:46 localhost dhclient[601]: bound to 10.31.13.238 -- renewal in 1657 seconds. Sep 25 06:34:46 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 330. Sep 25 06:34:46 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 330. Sep 25 06:34:46 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-hostnamed[616]: Hostname set to (static) Sep 25 06:34:46 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 215. Sep 25 06:34:46 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 155. Sep 25 06:34:47 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 189. Sep 25 06:34:47 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260487.9017] NetworkManager (version 1.51.0-1.el9) is starting... 
(boot:7f951c62-95d7-44c4-9332-b653316aa59b) Sep 25 06:34:47 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260487.9020] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0251] manager[0x55ef72180080]: monitoring kernel firmware directory '/lib/firmware'. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0273] hostname: hostname: using hostnamed Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0273] hostname: static hostname changed from (none) to "ip-10-31-13-238.us-east-1.aws.redhat.com" Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0307] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0934] manager[0x55ef72180080]: rfkill: Wi-Fi hardware radio set enabled Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.0935] manager[0x55ef72180080]: rfkill: WWAN hardware radio set enabled Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1069] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-device-plugin-team.so) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1069] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1074] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1076] manager: Networking is enabled by state file Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1105] settings: Loaded settings plugin: keyfile (internal) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 395. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 402. 
Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1541] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-settings-plugin-ifcfg-rh.so") Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1576] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1584] dhcp: init: Using DHCP client 'internal' Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1586] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1596] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1600] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1607] device (lo): Activation: starting connection 'lo' (80e3d2b6-c119-4663-9fb8-e8cd7022485a) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1613] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1616] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 189. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 192. 
Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1652] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1656] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1658] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1660] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1662] device (eth0): carrier: link connected Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1664] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1670] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1674] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1678] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1679] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1682] manager: NetworkManager state is now CONNECTING Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1683] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1688] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1690] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1711] dhcp4 (eth0): state changed new lease, address=10.31.13.238 Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1716] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 188. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 206. 
Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.1827] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 206. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 202. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 199. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 207. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 248. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 146. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 402. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4868] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4871] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4876] device (lo): Activation: successful, device activated. 
Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4941] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4944] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4947] manager: NetworkManager state is now CONNECTED_SITE Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4949] device (eth0): Activation: successful, device activated. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4958] manager: NetworkManager state is now CONNECTED_GLOBAL Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com NetworkManager[620]: [1727260488.4960] manager: startup complete Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 188. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 216. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: Added source 10.11.160.238 Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: Added source 10.18.100.10 Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: Added source 10.2.32.37 Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: Added source 10.2.32.38 Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Cloud-init v. 23.4-19.el9 running 'init' at Wed, 25 Sep 2024 10:34:48 +0000. Up 29.72 seconds. Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | eth0 | True | 10.31.13.238 | 255.255.252.0 | global | 0a:ff:d9:b3:7f:8d | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | eth0 | True | fe80::8ff:d9ff:feb3:7f8d/64 | . | link | 0a:ff:d9:b3:7f:8d | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | lo | True | ::1/128 | . | host | . 
| Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | 0 | 0.0.0.0 | 10.31.12.1 | 0.0.0.0 | eth0 | UG | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | 1 | 10.31.12.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+---------+-----------+-------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | Route | Destination | Gateway | Interface | Flags | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+---------+-----------+-------+ Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: | 3 | multicast | :: | eth0 | U | Sep 25 06:34:48 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: ci-info: +-------+-------------+---------+-----------+-------+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 0 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 0 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 48 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 48 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 49 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 49 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 50 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 50 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 51 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 51 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 52 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 52 affinity is now unmanaged Sep 25 06:34:51 
ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 53 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 53 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 54 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 54 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 55 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 55 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 56 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 56 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 57 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 57 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 58 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 58 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: Cannot change IRQ 59 affinity: Input/output error Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com irqbalance[583]: IRQ 59 affinity is now unmanaged Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Generating public/private rsa key pair. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key fingerprint is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: SHA256:w/zbSb1F+owKVubKh1hy5ds+Iz0rpyesexjXrIXXxRY root@ip-10-31-13-238.us-east-1.aws.redhat.com Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key's randomart image is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +---[RSA 3072]----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | E | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | ..| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | o . +| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | S oo+ oo| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | .o+++.=o.| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | =+*oBo .| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | .oo*X.O* | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | *==X==o| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +----[SHA256]-----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Generating public/private dsa key pair. 
Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key fingerprint is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: SHA256:wc5WrSpgWtQs+2EhiAWc8du3KP14hwlaBgrPE39j4gI root@ip-10-31-13-238.us-east-1.aws.redhat.com Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key's randomart image is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +---[DSA 1024]----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |oo+ | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | =.. o . . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |. ..+ + o . . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |. .oo+ + o . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |.o.+*.o.S . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |E.+++B*o.. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | ..++*+o+ | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | ..o o= . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | . .... | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +----[SHA256]-----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Generating public/private ecdsa key pair. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key fingerprint is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: SHA256:JVvOpjStzqyke3D+fAVZH05fZJMrO9Kp5DEJnFDW29k root@ip-10-31-13-238.us-east-1.aws.redhat.com Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key's randomart image is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +---[ECDSA 256]---+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | .o. o+| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | .. .. ooo| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | + +oo+ooo| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | %o. +oE.| Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | S *.o + | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | . .. = *.= | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | +. o o.= . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | oo= .o | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | oo.o*. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +----[SHA256]-----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Generating public/private ed25519 key pair. 
Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key fingerprint is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: SHA256:oQYaYd4Lna+LIsH7uNrNt0+zQS2v/atrUShZ6dYBOko root@ip-10-31-13-238.us-east-1.aws.redhat.com Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: The key's randomart image is: Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +--[ED25519 256]--+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | o .o | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | o + . .o . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | + = E ++ o . | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | + = oo++ o | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 216. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |. . . = So.. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |.. o . o. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: | .. . + .. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |oo.+ ... *. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: |++=.+..o+.++o. | Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[711]: +----[SHA256]-----+ Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 214. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 187. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 213. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 232. 
Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 244. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 200. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sm-notify[788]: Version 2.5.4 starting Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 247. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 217. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 244. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 200. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[799]: Server listening on 0.0.0.0 port 22. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[799]: Server listening on :: port 22. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 217. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[812]: Cloud-init v. 23.4-19.el9 running 'modules:config' at Wed, 25 Sep 2024 10:34:51 +0000. Up 32.58 seconds. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com rsyslogd[798]: [origin software="rsyslogd" swVersion="8.2310.0-4.el9" x-pid="798" x-info="https://www.rsyslog.com"] start Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 247. 
Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[799]: Received signal 15; terminating. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 480. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 480 and the job result is done. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 548 and the job result is done. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 548. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 546. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 547. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 544. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. 
░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 548. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 480. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com rsyslogd[798]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ] Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[843]: Server listening on 0.0.0.0 port 22. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[843]: Server listening on :: port 22. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 480. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 213. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 223. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 243. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 243. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 245. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 230. 
Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 225. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 224. Sep 25 06:34:51 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 116. Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com crond[850]: (CRON) STARTUP (1.5.7) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com crond[850]: (CRON) INFO (Syslog will be used instead of sendmail.) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com crond[850]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 7% if used.) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com crond[850]: (CRON) INFO (running with inotify support) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 241. Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 241. Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com restraintd[800]: Listening on http://localhost:8081 Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[857]: Cloud-init v. 23.4-19.el9 running 'modules:final' at Wed, 25 Sep 2024 10:34:52 +0000. Up 33.15 seconds. 
Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[861]: ############################################################# Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[864]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[869]: 1024 SHA256:wc5WrSpgWtQs+2EhiAWc8du3KP14hwlaBgrPE39j4gI root@ip-10-31-13-238.us-east-1.aws.redhat.com (DSA) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[877]: 256 SHA256:JVvOpjStzqyke3D+fAVZH05fZJMrO9Kp5DEJnFDW29k root@ip-10-31-13-238.us-east-1.aws.redhat.com (ECDSA) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[882]: 256 SHA256:oQYaYd4Lna+LIsH7uNrNt0+zQS2v/atrUShZ6dYBOko root@ip-10-31-13-238.us-east-1.aws.redhat.com (ED25519) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[889]: 3072 SHA256:w/zbSb1F+owKVubKh1hy5ds+Iz0rpyesexjXrIXXxRY root@ip-10-31-13-238.us-east-1.aws.redhat.com (RSA) Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[891]: -----END SSH HOST KEY FINGERPRINTS----- Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[894]: ############################################################# Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com cloud-init[857]: Cloud-init v. 23.4-19.el9 finished at Wed, 25 Sep 2024 10:34:52 +0000. Datasource DataSourceEc2Local. Up 33.30 seconds Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com kdumpctl[793]: kdump: Detected change(s) in the following file(s): /etc/fstab Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 223. Sep 25 06:34:52 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 212. Sep 25 06:34:54 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: Selected source 10.2.32.38 Sep 25 06:34:54 ip-10-31-13-238.us-east-1.aws.redhat.com chronyd[592]: System clock TAI offset set to 37 seconds Sep 25 06:34:56 ip-10-31-13-238.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Sep 25 06:34:56 ip-10-31-13-238.us-east-1.aws.redhat.com kdumpctl[793]: kdump: Rebuilding /boot/initramfs-5.14.0-511.el9.x86_64kdump.img Sep 25 06:34:57 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1415]: dracut-057-70.git20240819.el9 Sep 25 06:34:57 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/ad406aa3-aab4-4a6a-aa73-3e870a6316ae /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-5.14.0-511.el9.x86_64kdump.img 5.14.0-511.el9.x86_64 Sep 25 06:34:58 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! 
Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Module 'resume' will not be installed, because it's in the list to be omitted! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: memstrack is not available Sep 25 06:34:59 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! 
Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! 
Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: memstrack is not available Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: systemd *** Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: systemd-initrd *** Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: nss-softokn *** Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: rngd *** Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: i18n *** Sep 25 06:35:00 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: drm *** Sep 25 06:35:01 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: prefixdevname *** Sep 25 06:35:01 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: kernel-modules *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: kernel-modules-extra *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: fstab-sys *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: rootfs-block *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: terminfo *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: udev-rules *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Skipping udev rule: 91-permissions.rules Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Skipping udev rule: 80-drivers-modprobe.rules Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: dracut-systemd *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: usrmount *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: base *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: fs-lib *** Sep 25 06:35:02 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: kdumpbase *** Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: microcode_ctl-fw_dir_override *** Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl module: mangling fw_dir Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory 
"/usr/share/microcode_ctl/ucode_with_caveats/intel"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: intel: caveats check for kernel version "5.14.0-511.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-2d-07" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-4e-03" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-4f-01" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-55-04" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-5e-03" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-8c-01" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: shutdown *** Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including module: squash *** Sep 25 06:35:03 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Including modules done *** Sep 25 06:35:04 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Installing kernel module dependencies *** Sep 25 06:35:04 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Installing kernel module dependencies done *** Sep 25 06:35:04 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Resolving executable dependencies *** Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Resolving executable dependencies done *** Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Hardlinking files *** Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Mode: real Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Files: 433 Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Linked: 1 files Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Compared: 0 xattrs Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Compared: 7 files Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Saved: 56.15 KiB Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Duration: 0.006457 seconds Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Hardlinking files done *** Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Generating early-microcode cpio image *** Sep 25 06:35:05 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Constructing GenuineIntel.bin *** Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Constructing GenuineIntel.bin *** Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Store current command line parameters *** Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: Stored kernel commandline: Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: No dracut internal kernel commandline stored in the initramfs Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Install squash loader *** Sep 25 06:35:06 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Stripping files *** Sep 25 06:35:07 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Stripping files done *** Sep 25 06:35:07 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Squashing the files inside the initramfs *** Sep 25 06:35:12 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Squashing the files inside the initramfs done *** Sep 25 06:35:12 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Creating image file '/boot/initramfs-5.14.0-511.el9.x86_64kdump.img' *** Sep 25 06:35:13 ip-10-31-13-238.us-east-1.aws.redhat.com dracut[1417]: *** Creating initramfs image file '/boot/initramfs-5.14.0-511.el9.x86_64kdump.img' done *** Sep 25 06:35:13 ip-10-31-13-238.us-east-1.aws.redhat.com kdumpctl[793]: kdump: kexec: loaded kdump 
kernel Sep 25 06:35:13 ip-10-31-13-238.us-east-1.aws.redhat.com kdumpctl[793]: kdump: Starting kdump: [OK] Sep 25 06:35:13 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 232. Sep 25 06:35:13 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.025s (kernel) + 7.748s (initrd) + 46.002s (userspace) = 54.777s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1025762 microseconds. ░░ ░░ Initrd start-up required 7748701 microseconds. ░░ ░░ Userspace start-up required 46002675 microseconds. Sep 25 06:35:18 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4047]: Accepted publickey for root from 10.30.32.160 port 50782 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 551. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 550. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4047. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 550. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 549. 
Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Queued start job for default target Main User Target. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 3. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Reached target Sockets. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[4051]: Startup finished in 141ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 141649 microseconds. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 549. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 615. Sep 25 06:36:04 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4047]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4060]: Received disconnect from 10.30.32.160 port 50782:11: disconnected by user Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4060]: Disconnected from user root 10.30.32.160 port 50782 Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4047]: pam_unix(sshd:session): session closed for user root Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: Session 1 logged out. Waiting for processes to exit. Sep 25 06:36:05 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. 
Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4090]: Accepted publickey for root from 10.31.9.217 port 39604 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4089]: Accepted publickey for root from 10.31.9.217 port 39598 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4090. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 682. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4089. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 749. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4090]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4089]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4095]: Received disconnect from 10.31.9.217 port 39604:11: disconnected by user Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4095]: Disconnected from user root 10.31.9.217 port 39604 Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com sshd[4090]: pam_unix(sshd:session): session closed for user root Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: Session 3 logged out. Waiting for processes to exit. Sep 25 06:36:07 ip-10-31-13-238.us-east-1.aws.redhat.com systemd-logind[585]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated. Sep 25 06:37:14 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. 
░░ ░░ The job identifier is 817. Sep 25 06:37:14 ip-10-31-13-238.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 817. Sep 25 06:37:14 managed-node3 systemd-hostnamed[5454]: Hostname set to <managed-node3> (static) Sep 25 06:37:14 managed-node3 NetworkManager[620]: [1727260634.6864] hostname: static hostname changed from "ip-10-31-13-238.us-east-1.aws.redhat.com" to "managed-node3" Sep 25 06:37:14 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 881. Sep 25 06:37:14 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 881. Sep 25 06:37:24 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Sep 25 06:37:44 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Sep 25 06:38:28 managed-node3 sshd[6147]: Accepted publickey for root from 10.31.12.145 port 51682 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Sep 25 06:38:28 managed-node3 systemd-logind[585]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6147. Sep 25 06:38:28 managed-node3 systemd[1]: Started Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 945.
Sep 25 06:38:28 managed-node3 sshd[6147]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Sep 25 06:38:30 managed-node3 python3.9[6275]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:38:31 managed-node3 python3.9[6408]: ansible-service_facts Invoked Sep 25 06:38:33 managed-node3 python3.9[6599]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:38:34 managed-node3 python3.9[6706]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:38:52 managed-node3 kernel: SELinux: Converting 375 SID table entries... Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability open_perms=1 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability always_check_network=0 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Sep 25 06:38:52 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Sep 25 06:38:55 managed-node3 dbus-broker-launch[579]: avc: op=load_policy lsm=selinux seqno=2 res=1 Sep 25 06:38:56 managed-node3 systemd[1]: Starting PCP Reboot Initialization Helper Service... ░░ Subject: A start job for unit pcp-reboot-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has begun execution. ░░ ░░ The job identifier is 1012. Sep 25 06:38:56 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1015. Sep 25 06:38:56 managed-node3 systemd[1]: Finished PCP Reboot Initialization Helper Service. ░░ Subject: A start job for unit pcp-reboot-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has finished successfully. ░░ ░░ The job identifier is 1012. Sep 25 06:38:56 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1015. Sep 25 06:38:56 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... 
░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1146. Sep 25 06:38:56 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1078. Sep 25 06:38:56 managed-node3 pmcd[7606]: Installing dm PMDA ... Sep 25 06:38:56 managed-node3 rc[7463]: /etc/pcp/pmie/rc: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled. Sep 25 06:38:56 managed-node3 rc[7469]: /etc/pcp/pmlogger/rc: Warning: Performance Co-Pilot archive logger(s) not permanently enabled. Sep 25 06:38:56 managed-node3 rc[7463]: To enable pmie, run the following as root: Sep 25 06:38:56 managed-node3 rc[7463]: # /usr/bin/systemctl enable pmie.service Sep 25 06:38:56 managed-node3 rc[7469]: To enable pmlogger, run the following as root: Sep 25 06:38:56 managed-node3 rc[7469]: # /usr/bin/systemctl enable pmlogger.service Sep 25 06:38:56 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1146. Sep 25 06:38:56 managed-node3 systemd[1]: Started Half-hourly check of PMIE instances. ░░ Subject: A start job for unit pmie_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.timer has finished successfully. ░░ ░░ The job identifier is 1209. Sep 25 06:38:56 managed-node3 systemd[1]: Started Daily processing of PMIE logs. ░░ Subject: A start job for unit pmie_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_daily.timer has finished successfully. ░░ ░░ The job identifier is 1212. Sep 25 06:38:56 managed-node3 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1210. Sep 25 06:38:56 managed-node3 systemd[1]: Starting Check PMIE instances are running... ░░ Subject: A start job for unit pmie_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has begun execution. ░░ ░░ The job identifier is 1214. Sep 25 06:38:56 managed-node3 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1210. Sep 25 06:38:56 managed-node3 systemd[1]: Started Half-hourly check of pmie farm instances. ░░ Subject: A start job for unit pmie_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1211. 
Sep 25 06:38:56 managed-node3 systemd[1]: Starting Check and migrate non-primary pmie farm instances... ░░ Subject: A start job for unit pmie_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has begun execution. ░░ ░░ The job identifier is 1277. Sep 25 06:38:56 managed-node3 systemd[1]: Started Check and migrate non-primary pmie farm instances. ░░ Subject: A start job for unit pmie_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1277. Sep 25 06:38:56 managed-node3 systemd[1]: Started Check PMIE instances are running. ░░ Subject: A start job for unit pmie_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has finished successfully. ░░ ░░ The job identifier is 1214. Sep 25 06:38:57 managed-node3 systemd[1]: pmie_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm_check.service has successfully entered the 'dead' state. Sep 25 06:38:57 managed-node3 systemd[1]: pmie_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_check.service has successfully entered the 'dead' state. Sep 25 06:38:57 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1078. Sep 25 06:38:57 managed-node3 systemd[1]: Started Half-hourly check of pmlogger instances. ░░ Subject: A start job for unit pmlogger_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.timer has finished successfully. ░░ ░░ The job identifier is 1079. Sep 25 06:38:57 managed-node3 systemd[1]: Started Daily processing of archives. ░░ Subject: A start job for unit pmlogger_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_daily.timer has finished successfully. ░░ ░░ The job identifier is 1142. Sep 25 06:38:57 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1143. Sep 25 06:38:57 managed-node3 systemd[1]: Starting Check pmlogger instances are running... ░░ Subject: A start job for unit pmlogger_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has begun execution. ░░ ░░ The job identifier is 1340. Sep 25 06:38:57 managed-node3 systemd[1]: Started pmlogger farm service. 
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1143. Sep 25 06:38:57 managed-node3 systemd[1]: Started Check pmlogger instances are running. ░░ Subject: A start job for unit pmlogger_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has finished successfully. ░░ ░░ The job identifier is 1340. Sep 25 06:38:57 managed-node3 systemd[1]: Started Half-hourly check of pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1144. Sep 25 06:38:57 managed-node3 systemd[1]: Reloading. Sep 25 06:38:57 managed-node3 systemd-rc-local-generator[8544]: /etc/rc.d/rc.local is not marked executable, skipping. Sep 25 06:38:58 managed-node3 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances... ░░ Subject: A start job for unit pmlogger_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has begun execution. ░░ ░░ The job identifier is 1403. Sep 25 06:38:58 managed-node3 systemd[1]: Started Check and migrate non-primary pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1403. Sep 25 06:38:58 managed-node3 systemd[1]: pmlogger_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm_check.service has successfully entered the 'dead' state. Sep 25 06:38:59 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rae79caa656eb46d88cdea6b03e93459c.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rae79caa656eb46d88cdea6b03e93459c.service has finished successfully. ░░ ░░ The job identifier is 1466. Sep 25 06:38:59 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1529. Sep 25 06:38:59 managed-node3 systemd[1]: pmlogger_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_check.service has successfully entered the 'dead' state. Sep 25 06:38:59 managed-node3 systemd[1]: Reloading. Sep 25 06:38:59 managed-node3 systemd-rc-local-generator[9536]: /etc/rc.d/rc.local is not marked executable, skipping. 
Sep 25 06:38:59 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units… Sep 25 06:39:00 managed-node3 python3.9[10633]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:39:03 managed-node3 python3.9[12814]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:39:03 managed-node3 python3.9[13275]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:04 managed-node3 python3.9[13885]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:04 managed-node3 kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. Sep 25 06:39:04 managed-node3 kernel: device-mapper: uevent: version 1.0.3 Sep 25 06:39:04 managed-node3 kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@redhat.com Sep 25 06:39:04 managed-node3 pmcd[14295]: Installing nfsclient PMDA ... Sep 25 06:39:04 managed-node3 python3.9[14378]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:04 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Sep 25 06:39:04 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1529. Sep 25 06:39:04 managed-node3 systemd[1]: man-db-cache-update.service: Consumed 2.921s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service completed and consumed the indicated resources. 
Sep 25 06:39:04 managed-node3 systemd[1]: run-rae79caa656eb46d88cdea6b03e93459c.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rae79caa656eb46d88cdea6b03e93459c.service has successfully entered the 'dead' state. Sep 25 06:39:05 managed-node3 python3.9[14552]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260744.2891223-7219-206680251957562/.source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:05 managed-node3 python3.9[14659]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:05 managed-node3 python3.9[14744]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260745.1787157-7258-157571553649633/.source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:06 managed-node3 python3.9[14934]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:07 managed-node3 python3.9[15031]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260745.7762794-7296-150965605849618/.source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:07 managed-node3 python3.9[15143]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Sep 25 06:39:07 managed-node3 useradd[15145]: new group: name=metrics, GID=992 Sep 25 06:39:07 managed-node3 useradd[15145]: new user: name=metrics, UID=992, GID=992, home=/home/metrics, shell=/bin/bash, from=/dev/pts/0 Sep 25 06:39:08 managed-node3 pmcd[15163]: Installing openmetrics PMDA ... Sep 25 06:39:08 managed-node3 pmcd[15212]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15212) Info: Initializing ... 
currently in notready state. Sep 25 06:39:08 managed-node3 pmcd[15212]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15212) Info: Config change detected, traversed 2 config entries in 0.0001s, rescanning ... Sep 25 06:39:08 managed-node3 pmcd[15212]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15212) Info: Found source grafana cluster 1 Sep 25 06:39:08 managed-node3 pmcd[15212]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15212) Info: Found source kepler cluster 2 Sep 25 06:39:08 managed-node3 pmcd[15212]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15212) Info: Ready to process requests Sep 25 06:39:08 managed-node3 python3.9[15306]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:39:08 managed-node3 sasldblistusers2[15313]: SASL error opening password file. Have you performed the migration from db2 using cyrusbdb2current? Sep 25 06:39:08 managed-node3 sasldblistusers2[15313]: _sasldb_getkeyhandle has failed Sep 25 06:39:08 managed-node3 pmcd[15307]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15307) Info: Initializing ... currently in notready state. Sep 25 06:39:08 managed-node3 pmcd[15307]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15307) Info: Config change detected, traversed 2 config entries in 0.0001s, rescanning ... Sep 25 06:39:08 managed-node3 pmcd[15307]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15307) Info: Found source grafana cluster 1 Sep 25 06:39:08 managed-node3 pmcd[15307]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15307) Info: Found source kepler cluster 2 Sep 25 06:39:08 managed-node3 pmcd[15307]: [Wed Sep 25 06:39:08] pmdaopenmetrics(15307) Info: Ready to process requests Sep 25 06:39:09 managed-node3 python3.9[15509]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:09 managed-node3 python3.9[15596]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260748.809433-7409-39566977557047/.source.conf dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:10 managed-node3 python3.9[15794]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:10 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1593. Sep 25 06:39:10 managed-node3 pmcd[15164]: Terminated Sep 25 06:39:10 managed-node3 pmcd[7478]: _pmda_setup: Interrupted! 
Sep 25 06:39:10 managed-node3 pmcd[7478]: _pmda_setup_cleanup: reset .NeedInstall for openmetrics PMDA Sep 25 06:39:10 managed-node3 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Sep 25 06:39:10 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 1593 and the job result is done. Sep 25 06:39:10 managed-node3 systemd[1]: pmcd.service: Consumed 4.079s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Sep 25 06:39:10 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1593. Sep 25 06:39:11 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1593. Sep 25 06:39:11 managed-node3 pmcd[16315]: Installing openmetrics PMDA ... Sep 25 06:39:11 managed-node3 pmcd[16346]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16346) Info: Initializing ... currently in notready state. Sep 25 06:39:11 managed-node3 pmcd[16346]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16346) Info: Config change detected, traversed 2 config entries in 0.0001s, rescanning ... Sep 25 06:39:11 managed-node3 pmcd[16346]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16346) Info: Found source grafana cluster 1 Sep 25 06:39:11 managed-node3 pmcd[16346]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16346) Info: Found source kepler cluster 2 Sep 25 06:39:11 managed-node3 pmcd[16346]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16346) Info: Ready to process requests Sep 25 06:39:11 managed-node3 pmcd[16442]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16442) Info: Initializing ... currently in notready state. Sep 25 06:39:11 managed-node3 pmcd[16442]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16442) Info: Config change detected, traversed 2 config entries in 0.0001s, rescanning ... 
Sep 25 06:39:11 managed-node3 pmcd[16442]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16442) Info: Found source grafana cluster 1 Sep 25 06:39:11 managed-node3 pmcd[16442]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16442) Info: Found source kepler cluster 2 Sep 25 06:39:11 managed-node3 python3.9[16441]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:11 managed-node3 pmcd[16442]: [Wed Sep 25 06:39:11] pmdaopenmetrics(16442) Info: Ready to process requests Sep 25 06:39:12 managed-node3 python3.9[16665]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:12 managed-node3 python3.9[16789]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:12 managed-node3 python3.9[16896]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:13 managed-node3 python3.9[17003]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:13 managed-node3 python3.9[17170]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:13 managed-node3 python3.9[17277]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:14 managed-node3 python3.9[17384]: ansible-file Invoked with 
path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:14 managed-node3 python3.9[17567]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:14 managed-node3 python3.9[17660]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260754.2871108-7642-15904464694277/.source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:15 managed-node3 python3.9[17767]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:15 managed-node3 python3.9[17854]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260754.9324775-7642-73536183650550/.source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:15 managed-node3 python3.9[17966]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:16 managed-node3 python3.9[18053]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260755.5155172-7642-46575473131572/.source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:16 managed-node3 python3.9[18160]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:16 managed-node3 python3.9[18259]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260756.1477296-7642-184004581848371/.source dest=/etc/pcp/pmieconf/power/thermal_throttle owner=root group=root mode=0644 _original_basename=thermal_throttle follow=False checksum=1d53d6182709617c8f633339652d8d9e75f3b603 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None 
attributes=None Sep 25 06:39:17 managed-node3 python3.9[18366]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:17 managed-node3 python3.9[18453]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260756.7704017-7642-253975492016925/.source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:17 managed-node3 python3.9[18560]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:17 managed-node3 python3.9[18614]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:18 managed-node3 python3.9[18721]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:18 managed-node3 python3.9[18828]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:19 managed-node3 python3.9[18935]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:19 managed-node3 python3.9[19042]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None 
owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:19 managed-node3 python3.9[19149]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:20 managed-node3 python3.9[19256]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:20 managed-node3 python3.9[19363]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:21 managed-node3 python3.9[19470]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:21 managed-node3 systemd[1]: Stopping pmie farm service... ░░ Subject: A stop job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1724. Sep 25 06:39:21 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm.service has successfully entered the 'dead' state. Sep 25 06:39:21 managed-node3 systemd[1]: Stopped pmie farm service. ░░ Subject: A stop job for unit pmie_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has finished. ░░ ░░ The job identifier is 1724 and the job result is done. Sep 25 06:39:21 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... ░░ Subject: A stop job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1656. Sep 25 06:39:21 managed-node3 systemd[1]: pmie.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie.service has successfully entered the 'dead' state. Sep 25 06:39:21 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. 
░░ Subject: A stop job for unit pmie.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has finished. ░░ ░░ The job identifier is 1656 and the job result is done. Sep 25 06:39:21 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... ░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1656. Sep 25 06:39:21 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1656. Sep 25 06:39:21 managed-node3 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1724. Sep 25 06:39:21 managed-node3 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1724. Sep 25 06:39:21 managed-node3 python3.9[20115]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:22 managed-node3 python3.9[20222]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:22 managed-node3 python3.9[20309]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260762.1093764-8194-95932988982478/.source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:23 managed-node3 python3.9[20416]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:23 managed-node3 python3.9[20503]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260762.7836943-8236-63103719521422/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:24 managed-node3 python3.9[20610]: 
ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:24 managed-node3 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1793. Sep 25 06:39:24 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Sep 25 06:39:24 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 1793 and the job result is done. Sep 25 06:39:24 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1725. Sep 25 06:39:24 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Sep 25 06:39:24 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1725 and the job result is done. Sep 25 06:39:24 managed-node3 systemd[1]: pmlogger.service: Consumed 1.466s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Sep 25 06:39:24 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1725. Sep 25 06:39:24 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1725. Sep 25 06:39:24 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1793. Sep 25 06:39:24 managed-node3 systemd[1]: Started pmlogger farm service. 
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1793. Sep 25 06:39:25 managed-node3 python3.9[21623]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:39:25 managed-node3 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1862. Sep 25 06:39:25 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Sep 25 06:39:25 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 1862 and the job result is done. Sep 25 06:39:25 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1794. Sep 25 06:39:26 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Sep 25 06:39:26 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1794 and the job result is done. Sep 25 06:39:26 managed-node3 systemd[1]: pmlogger.service: Consumed 1.134s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Sep 25 06:39:26 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1794. Sep 25 06:39:26 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1794. Sep 25 06:39:26 managed-node3 systemd[1]: Starting pmlogger farm service... 
░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1862. Sep 25 06:39:26 managed-node3 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1862. Sep 25 06:39:27 managed-node3 python3.9[22224]: ansible-service_facts Invoked Sep 25 06:39:30 managed-node3 python3.9[22862]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:39:31 managed-node3 python3.9[22995]: ansible-service_facts Invoked Sep 25 06:39:33 managed-node3 python3.9[23187]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:39:33 managed-node3 python3.9[23294]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:39:34 managed-node3 python3.9[23402]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:39:36 managed-node3 python3.9[23510]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:39:36 managed-node3 python3.9[23618]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:37 managed-node3 python3.9[23725]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 
06:39:37 managed-node3 python3.9[23832]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:37 managed-node3 python3.9[23886]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:38 managed-node3 python3.9[23993]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:38 managed-node3 python3.9[24047]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:38 managed-node3 python3.9[24154]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:39 managed-node3 python3.9[24208]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:39 managed-node3 python3.9[24315]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Sep 25 06:39:40 managed-node3 python3.9[24424]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:39:40 managed-node3 python3.9[24536]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:40 managed-node3 python3.9[24590]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:41 managed-node3 python3.9[24697]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:41 managed-node3 python3.9[24806]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:42 managed-node3 python3.9[24913]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:42 managed-node3 python3.9[25020]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:42 managed-node3 python3.9[25127]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:43 managed-node3 python3.9[25234]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:43 managed-node3 
python3.9[25341]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:43 managed-node3 python3.9[25448]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:44 managed-node3 python3.9[25555]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:44 managed-node3 python3.9[25662]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:44 managed-node3 python3.9[25716]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:44 managed-node3 python3.9[25823]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:45 managed-node3 python3.9[25877]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:45 managed-node3 python3.9[25984]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:45 managed-node3 python3.9[26038]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None Sep 25 06:39:45 managed-node3 python3.9[26145]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:46 managed-node3 python3.9[26199]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:46 managed-node3 python3.9[26306]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:46 managed-node3 python3.9[26360]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:46 managed-node3 python3.9[26467]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:47 managed-node3 python3.9[26521]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:47 managed-node3 python3.9[26628]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:48 managed-node3 python3.9[26735]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:48 managed-node3 python3.9[26842]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies 
recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:48 managed-node3 python3.9[26949]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:49 managed-node3 python3.9[27056]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:49 managed-node3 python3.9[27163]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:49 managed-node3 python3.9[27270]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:50 managed-node3 python3.9[27377]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:50 managed-node3 python3.9[27486]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:50 managed-node3 python3.9[27593]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:51 managed-node3 python3.9[27647]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file 
path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:51 managed-node3 python3.9[27754]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:39:51 managed-node3 python3.9[27808]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:39:52 managed-node3 python3.9[27915]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:39:53 managed-node3 python3.9[28024]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:39:53 managed-node3 python3.9[28131]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:39:54 managed-node3 python3.9[28238]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:39:55 managed-node3 python3.9[28346]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:39:56 managed-node3 dbus-broker-launch[578]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. 
Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Sep 25 06:39:56 managed-node3 dbus-broker-launch[578]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Sep 25 06:39:56 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r29cdac7038f54174a47c89e4e1e945af.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r29cdac7038f54174a47c89e4e1e945af.service has finished successfully. ░░ ░░ The job identifier is 1865. Sep 25 06:39:56 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1928. Sep 25 06:39:57 managed-node3 python3.9[28689]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Sep 25 06:39:58 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Sep 25 06:39:58 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1928. Sep 25 06:39:58 managed-node3 systemd[1]: run-r29cdac7038f54174a47c89e4e1e945af.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r29cdac7038f54174a47c89e4e1e945af.service has successfully entered the 'dead' state. 
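The dnf entries above (python3-libselinux, python3-policycoreutils, then policycoreutils-python-utils) prepare the host for the ansible-seboolean call that follows, which enables pcp_bind_all_unreserved_ports non-persistently. A minimal Ansible sketch of that sequence, assuming the ansible.posix collection is available; the task names and play scaffolding are illustrative, not taken from the role:

# Sketch only: approximates the package + SELinux boolean steps recorded in the
# journal around this point; naming and ordering are illustrative assumptions.
- name: Ensure SELinux management tooling is present
  ansible.builtin.dnf:
    name:
      - python3-libselinux
      - python3-policycoreutils
      - policycoreutils-python-utils
    state: present

- name: Allow pmcd to bind to unreserved ports under SELinux
  ansible.posix.seboolean:
    name: pcp_bind_all_unreserved_ports
    state: true
    persistent: false   # matches the persistent=False seen in the logged invocation

With persistent: false the boolean reverts on reboot, which is consistent with the logged invocation and sufficient for a test run; a production deployment would more likely set it persistently.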
Sep 25 06:39:58 managed-node3 python3.9[28945]: ansible-seboolean Invoked with name=pcp_bind_all_unreserved_ports state=True persistent=False ignore_selinux_state=False Sep 25 06:39:59 managed-node3 python3.9[29052]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Sep 25 06:40:02 managed-node3 python3.9[29159]: ansible-ansible.legacy.command Invoked with _raw_params=pcp _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:03 managed-node3 python3.9[29357]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:03 managed-node3 python3.9[29467]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:04 managed-node3 python3.9[29575]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger_timers" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:04 managed-node3 python3.9[29683]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:05 managed-node3 python3.9[29793]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail semanage boolean --list | egrep "pcp_bind_all_unreserved_ports *\(on " _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:06 managed-node3 python3.9[29903]: ansible-service_facts Invoked Sep 25 06:40:06 managed-node3 dbus-broker-launch[579]: avc: op=load_policy lsm=selinux seqno=3 res=1 Sep 25 06:40:07 managed-node3 python3.9[30095]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:08 managed-node3 python3.9[30203]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:08 managed-node3 python3.9[30311]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:09 managed-node3 python3.9[30419]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:11 managed-node3 python3.9[30562]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:11 managed-node3 python3.9[30695]: ansible-service_facts Invoked Sep 25 06:40:13 
managed-node3 python3.9[30887]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:40:14 managed-node3 python3.9[30994]: ansible-ansible.legacy.dnf Invoked with name=['pcp-pmda-elasticsearch'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:40:16 managed-node3 python3.9[31109]: ansible-file Invoked with path=/etc/pcp/elasticsearch state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:16 managed-node3 python3.9[31216]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/elasticsearch/elasticsearch.conf follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:17 managed-node3 python3.9[31303]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260816.6139014-10480-268604213220760/.source.conf dest=/etc/pcp/elasticsearch/elasticsearch.conf follow=True mode=0600 _original_basename=elasticsearch.conf.j2 checksum=13f91d28ea10d21516fb892b9c304eb8001fb026 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:18 managed-node3 python3.9[31410]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:40:19 managed-node3 python3.9[31518]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:40:20 managed-node3 python3.9[31626]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:21 managed-node3 python3.9[31734]: ansible-file Invoked with path=/var/lib/pcp/pmdas/elasticsearch/.NeedInstall mode=u=rw,g=r,o=r state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:21 managed-node3 python3.9[31841]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:21 managed-node3 python3.9[31948]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:22 managed-node3 python3.9[32055]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:22 managed-node3 python3.9[32109]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:22 managed-node3 python3.9[32216]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:22 managed-node3 python3.9[32270]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:23 managed-node3 python3.9[32377]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:23 managed-node3 python3.9[32431]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None 
group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:23 managed-node3 python3.9[32538]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Sep 25 06:40:24 managed-node3 python3.9[32647]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:24 managed-node3 python3.9[32759]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:24 managed-node3 python3.9[32813]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:25 managed-node3 python3.9[32920]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:40:25 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1991. Sep 25 06:40:26 managed-node3 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Sep 25 06:40:26 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 1991 and the job result is done. Sep 25 06:40:26 managed-node3 systemd[1]: pmcd.service: Consumed 3.205s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. 
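The shell step logged above guards SASL account creation behind a sasldblistusers2 check, then tightens ownership and mode on /etc/pcp/passwd.db before pmcd is restarted with enabled=True. A hedged reconstruction of those two steps as Ansible tasks; the metrics_sasl_password variable and the no_log keyword are illustrative assumptions (the log masks secret values as NOT_LOGGING_PARAMETER), not the role's own names:

# Sketch only: reconstructs the SASL user and pmcd restart steps visible in the
# journal above; variable names are assumptions for illustration.
- name: Ensure the metrics SASL account exists in the pmcd user database
  ansible.builtin.shell: |
    set -eu
    if set -o | grep -q pipefail; then
      set -o pipefail  # pipefail not supported on debian, some ubuntu
    fi
    if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
      echo "Creating new metrics user in /etc/pcp/passwd.db"
      echo "{{ metrics_sasl_password }}" | saslpasswd2 -a pmcd "metrics"
      chown root:pcp "/etc/pcp/passwd.db"
      chmod 640 "/etc/pcp/passwd.db"
    fi
  no_log: true

- name: Restart pmcd so the SASL configuration takes effect
  ansible.builtin.systemd:
    name: pmcd
    state: restarted
    enabled: true

Guarding saslpasswd2 behind the grep keeps the step effectively idempotent: once metrics@ exists in the database, re-runs skip account creation and leave the file's ownership and mode untouched.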
Sep 25 06:40:26 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1991. Sep 25 06:40:26 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1991. Sep 25 06:40:26 managed-node3 pmcd[33436]: Installing elasticsearch PMDA ... Sep 25 06:40:27 managed-node3 python3.9[33558]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:27 managed-node3 python3.9[33753]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:27 managed-node3 python3.9[33860]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:28 managed-node3 python3.9[33967]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:28 managed-node3 python3.9[34115]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:28 managed-node3 python3.9[34274]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:29 managed-node3 python3.9[34381]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False 
force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:29 managed-node3 python3.9[34488]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:29 managed-node3 python3.9[34600]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:30 managed-node3 python3.9[34654]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:30 managed-node3 python3.9[34761]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:30 managed-node3 python3.9[34815]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:30 managed-node3 python3.9[34933]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:31 managed-node3 python3.9[34987]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:31 managed-node3 python3.9[35094]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:31 managed-node3 python3.9[35148]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:32 managed-node3 python3.9[35255]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:32 managed-node3 python3.9[35309]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:32 managed-node3 python3.9[35416]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:32 managed-node3 python3.9[35470]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:33 managed-node3 python3.9[35577]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:33 managed-node3 python3.9[35684]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:34 managed-node3 python3.9[35791]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:34 managed-node3 python3.9[35898]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:34 managed-node3 python3.9[36005]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:34 managed-node3 python3.9[36112]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:35 managed-node3 python3.9[36219]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:35 managed-node3 python3.9[36326]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:40:35 managed-node3 systemd[1]: Stopping pmie farm service... ░░ Subject: A stop job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 2122. Sep 25 06:40:35 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm.service has successfully entered the 'dead' state. Sep 25 06:40:35 managed-node3 systemd[1]: Stopped pmie farm service. ░░ Subject: A stop job for unit pmie_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has finished. ░░ ░░ The job identifier is 2122 and the job result is done. Sep 25 06:40:35 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... ░░ Subject: A stop job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 2054. Sep 25 06:40:36 managed-node3 systemd[1]: pmie.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie.service has successfully entered the 'dead' state. Sep 25 06:40:36 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. ░░ Subject: A stop job for unit pmie.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has finished. ░░ ░░ The job identifier is 2054 and the job result is done. Sep 25 06:40:36 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... ░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 2054. Sep 25 06:40:36 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 2054. Sep 25 06:40:36 managed-node3 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 2122. Sep 25 06:40:36 managed-node3 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 2122. 
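The series of ansible-file invocations with state=link above wires the pmieconf rules under /etc/pcp/pmieconf into /var/lib/pcp/config/pmieconf before pmie and pmie_farm are restarted. A minimal sketch of one such task follows; src, dest, state, and force are copied from the logged invocation, while the task name is assumed.

# Sketch only: parameters copied from the logged ansible-file invocation; task name assumed.
- name: Link a pmieconf rule into the pmie configuration tree
  ansible.builtin.file:
    src: /etc/pcp/pmieconf/network/tcplistenoverflows
    dest: /var/lib/pcp/config/pmieconf/network/tcplistenoverflows
    state: link
    force: true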
Sep 25 06:40:36 managed-node3 python3.9[36975]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:37 managed-node3 python3.9[37082]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:37 managed-node3 python3.9[37136]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:37 managed-node3 python3.9[37243]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:40:37 managed-node3 python3.9[37297]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:40:38 managed-node3 python3.9[37404]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:40:39 managed-node3 python3.9[37513]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:39 managed-node3 python3.9[37621]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/pcp/elasticsearch/elasticsearch.conf" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:40:40 managed-node3 python3.9[37729]: ansible-service_facts Invoked Sep 25 06:40:41 managed-node3 python3.9[37921]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:42 managed-node3 python3.9[38029]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:42 managed-node3 python3.9[38137]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:43 managed-node3 python3.9[38245]: ansible-ansible.legacy.systemd Invoked 
with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:40:45 managed-node3 python3.9[38388]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:47 managed-node3 python3.9[38556]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:49 managed-node3 python3.9[38724]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:51 managed-node3 python3.9[38892]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:55 managed-node3 python3.9[39060]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:40:55 managed-node3 python3.9[39193]: ansible-service_facts Invoked Sep 25 06:40:58 managed-node3 python3.9[39385]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:40:58 managed-node3 python3.9[39492]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:40:59 managed-node3 python3.9[39600]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:41:01 managed-node3 python3.9[39708]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:01 managed-node3 python3.9[39816]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:01 managed-node3 python3.9[39923]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:02 managed-node3 python3.9[40030]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:02 managed-node3 python3.9[40084]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:02 managed-node3 python3.9[40191]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:02 managed-node3 python3.9[40245]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:03 managed-node3 python3.9[40352]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:03 managed-node3 python3.9[40406]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:04 managed-node3 python3.9[40513]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Sep 25 06:41:04 managed-node3 python3.9[40622]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:04 managed-node3 python3.9[40734]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:05 managed-node3 python3.9[40788]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:05 managed-node3 python3.9[40895]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:41:06 managed-node3 python3.9[41004]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:06 managed-node3 python3.9[41111]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:06 managed-node3 python3.9[41218]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:07 managed-node3 python3.9[41325]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:07 managed-node3 python3.9[41432]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:07 managed-node3 
systemd[4051]: Created slice User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Sep 25 06:41:07 managed-node3 systemd[4051]: Starting Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Sep 25 06:41:07 managed-node3 systemd[4051]: Finished Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Sep 25 06:41:07 managed-node3 python3.9[41540]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:08 managed-node3 python3.9[41647]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:08 managed-node3 python3.9[41754]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:08 managed-node3 python3.9[41861]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:09 managed-node3 python3.9[41915]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:09 managed-node3 python3.9[42022]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:09 managed-node3 python3.9[42076]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file 
path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:09 managed-node3 python3.9[42183]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:10 managed-node3 python3.9[42237]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:10 managed-node3 python3.9[42344]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:10 managed-node3 python3.9[42398]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:10 managed-node3 python3.9[42505]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:11 managed-node3 python3.9[42559]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:11 managed-node3 python3.9[42666]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:11 managed-node3 python3.9[42720]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:12 managed-node3 python3.9[42827]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False 
search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:12 managed-node3 python3.9[42934]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:12 managed-node3 python3.9[43041]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:13 managed-node3 python3.9[43148]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:13 managed-node3 python3.9[43255]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:13 managed-node3 python3.9[43362]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:14 managed-node3 python3.9[43469]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:14 managed-node3 python3.9[43576]: ansible-ansible.legacy.systemd 
Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:41:15 managed-node3 python3.9[43685]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:15 managed-node3 python3.9[43792]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:15 managed-node3 python3.9[43846]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:16 managed-node3 python3.9[43953]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Sep 25 06:41:16 managed-node3 python3.9[44040]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1727260875.8275948-13827-81185041372575/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=cb4ba174284a3ed6fb6ab3e0b10cd8354f1dfc4c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:17 managed-node3 python3.9[44147]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Sep 25 06:41:17 managed-node3 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2191. Sep 25 06:41:17 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Sep 25 06:41:17 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2191 and the job result is done. Sep 25 06:41:17 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2123. 
Sep 25 06:41:17 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Sep 25 06:41:17 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 2123 and the job result is done. Sep 25 06:41:17 managed-node3 systemd[1]: pmlogger.service: Consumed 1.224s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Sep 25 06:41:17 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2123. Sep 25 06:41:18 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 2123. Sep 25 06:41:18 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2191. Sep 25 06:41:18 managed-node3 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2191. 
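The pmlogger restart above follows the update of /etc/sysconfig/pmlogger_timers (deployed from pmlogger.timers.j2, per the logged _original_basename). A minimal sketch of the restart step; name, state, and enabled come from the logged systemd invocation, and the task name is assumed.

# Sketch only: name/state/enabled are from the logged systemd invocation; task name assumed.
- name: Restart and enable pmlogger after configuration changes
  ansible.builtin.systemd:
    name: pmlogger
    state: restarted
    enabled: true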
Sep 25 06:41:19 managed-node3 python3.9[45092]: ansible-ansible.legacy.command Invoked with _raw_params=pcp _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:19 managed-node3 python3.9[45410]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:19 managed-node3 python3.9[45520]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:20 managed-node3 python3.9[45628]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger_timers" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:20 managed-node3 python3.9[45736]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:20 managed-node3 python3.9[45846]: ansible-ansible.legacy.command Invoked with _raw_params=grep -e '--discard 137' /etc/sysconfig/pmlogger_timers _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Sep 25 06:41:21 managed-node3 python3.9[45954]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:41:21 managed-node3 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2260. Sep 25 06:41:21 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Sep 25 06:41:21 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2260 and the job result is done. Sep 25 06:41:21 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2192. Sep 25 06:41:21 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Sep 25 06:41:21 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 2192 and the job result is done. Sep 25 06:41:21 managed-node3 systemd[1]: pmlogger.service: Consumed 1.564s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Sep 25 06:41:21 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2192. Sep 25 06:41:22 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 2192. Sep 25 06:41:22 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2260. Sep 25 06:41:22 managed-node3 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2260. 
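The checks logged at 06:41:19-06:41:20 verify the primary pmlogger and pmie instances by running pmprobe and grepping for the "primary" instance under set -euo pipefail. A minimal sketch of the pmlogger check follows; the pipeline is copied from the logged _raw_params, while the task name and changed_when are assumptions.

# Sketch only: pipeline copied from the logged command; task name and changed_when assumed.
- name: Verify the primary pmlogger is registered with pmcd
  ansible.builtin.shell: |
    set -euo pipefail
    pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"'
  changed_when: false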
Sep 25 06:41:23 managed-node3 python3.9[46628]: ansible-service_facts Invoked Sep 25 06:41:25 managed-node3 python3.9[47214]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:41:25 managed-node3 python3.9[47322]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:41:25 managed-node3 python3.9[47430]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:41:26 managed-node3 python3.9[47538]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Sep 25 06:41:28 managed-node3 python3.9[47681]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Sep 25 06:41:28 managed-node3 python3.9[47814]: ansible-service_facts Invoked Sep 25 06:41:30 managed-node3 python3.9[48006]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Sep 25 06:41:31 managed-node3 python3.9[48113]: ansible-ansible.legacy.dnf Invoked with name=['valkey'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Sep 25 06:41:32 managed-node3 groupadd[48120]: group added to /etc/group: name=valkey, GID=991 Sep 25 06:41:32 managed-node3 groupadd[48120]: group added to /etc/gshadow: name=valkey Sep 25 06:41:32 managed-node3 groupadd[48120]: new group: name=valkey, GID=991 Sep 25 06:41:32 managed-node3 useradd[48127]: new user: name=valkey, UID=991, GID=991, home=/dev/null, shell=/sbin/nologin, from=none Sep 25 06:41:32 managed-node3 systemd[1]: Reloading. Sep 25 06:41:32 managed-node3 systemd-rc-local-generator[48160]: /etc/rc.d/rc.local is not marked executable, skipping. 
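The dnf step above installs the valkey package, which in turn creates the valkey group and user seen in the journal. A minimal sketch of that install; the package name and state are taken from the logged dnf invocation, and the task name is assumed.

# Sketch only: package name and state are from the logged dnf invocation; task name assumed.
- name: Install the valkey package
  ansible.builtin.dnf:
    name: valkey
    state: present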
Sep 25 06:41:32 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units… Sep 25 06:41:33 managed-node3 python3.9[48279]: ansible-file Invoked with path=/etc/redis state=directory owner=redis group=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Sep 25 06:41:34 managed-node3 python3.9[48386]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 18 root root 235 Sep 24 02:40 ../ drwxr-xr-x. 2 root root 60 Sep 25 06:34 tmpfiles.d/ drwxr-xr-x. 3 root root 60 Sep 25 06:34 log/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 mount/ drwxr-xr-x. 4 root root 100 Sep 25 06:34 initramfs/ -r--r--r--. 1 root root 33 Sep 25 06:34 machine-id srw-rw-rw-. 1 root root 0 Sep 25 06:34 rpcbind.sock= prw-------. 1 root root 0 Sep 25 06:34 initctl| drwxr-xr-x. 5 root root 100 Sep 25 06:34 credentials/ drwx------. 2 root root 40 Sep 25 06:34 cryptsetup/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 setrans/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 sepermit/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 faillock/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 console/ drwxr-xr-x. 2 root root 40 Sep 25 06:34 motd.d/ drwx--x--x. 3 root root 60 Sep 25 06:34 sudo/ -rw-r--r--. 1 root root 0 Sep 25 06:34 motd drwxr-xr-x. 3 root root 60 Sep 25 06:34 tpm2-tss/ drwx------. 2 rpc rpc 60 Sep 25 06:34 rpcbind/ -rw-r--r--. 1 root root 4 Sep 25 06:34 auditd.pid drwxr-xr-x. 2 root root 60 Sep 25 06:34 dbus/ srw-rw-rw-. 1 root root 0 Sep 25 06:34 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 Sep 25 06:34 irqbalance/ -rw-r--r--. 1 root root 4 Sep 25 06:34 dhclient.pid -rw-r--r--. 1 root root 619 Sep 25 06:34 dhclient.lease -rw-------. 1 root root 4 Sep 25 06:34 gssproxy.pid srw-rw-rw-. 1 root root 0 Sep 25 06:34 gssproxy.sock= drwxr-xr-x. 2 root root 60 Sep 25 06:34 chrony-dhcp/ drwxr-x---. 2 chrony chrony 80 Sep 25 06:34 chrony/ drwxr-xr-x. 3 root root 80 Sep 25 06:34 lock/ -rw-------. 1 root root 4 Sep 25 06:34 sm-notify.pid -rw-------. 1 root root 3 Sep 25 06:34 rsyslogd.pid -rw-r--r--. 1 root root 4 Sep 25 06:34 sshd.pid -rw-r--r--. 1 root root 4 Sep 25 06:34 crond.pid ----------. 1 root root 0 Sep 25 06:34 cron.reboot drwx------. 3 root root 340 Sep 25 06:34 cloud-init/ -rw-------. 1 root root 0 Sep 25 06:34 agetty.reload drwxr-xr-x. 2 root root 80 Sep 25 06:34 blkid/ drwxr-xr-x. 3 root root 60 Sep 25 06:36 user/ drwxr-xr-x. 6 root root 160 Sep 25 06:37 NetworkManager/ drwxr-xr-x. 28 root root 900 Sep 25 06:38 ./ drwxr-xr-x. 7 root root 160 Sep 25 06:39 udev/ drwxrwxr-x. 2 pcp pcp 160 Sep 25 06:41 pcp/ drwxr-xr-x. 22 root root 560 Sep 25 06:41 systemd/ -rw-rw-r--. 1 root utmp 1920 Sep 25 06:41 utmp total 12 drwxr-xr-x. 28 root root 900 Sep 25 06:38 ../ srw-rw-rw-. 1 root root 0 Sep 25 06:40 pmcd.socket= -r--r--r--. 1 root root 5 Sep 25 06:40 pmcd.pid -r--r--r--. 
1 pcp pcp 5 Sep 25 06:40 pmie.pid lrwxrwxrwx. 1 pcp pcp 30 Sep 25 06:41 pmlogger.primary.socket -> /run/pcp/pmlogger.46239.socket= -r--r--r--. 1 pcp pcp 5 Sep 25 06:41 pmlogger.pid srw-rw-rw-. 1 pcp pcp 0 Sep 25 06:41 pmlogger.46239.socket= drwxrwxr-x. 2 pcp pcp 160 Sep 25 06:41 ./ TASK [Reraise error] *********************************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 Wednesday 25 September 2024 06:41:34 -0400 (0:00:00.467) 0:00:06.924 *** fatal: [managed-node3]: FAILED! => { "changed": false } MSG: {'path': '/etc/redis', 'failed': True, 'msg': 'chown failed: failed to look up user redis', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0755', 'state': 'directory', 'secontext': 'unconfined_u:object_r:etc_t:s0', 'size': 6, 'invocation': {'module_args': {'path': '/etc/redis', 'state': 'directory', 'owner': 'redis', 'group': 'root', 'mode': '0750', 'recurse': False, 'force': False, 'follow': True, 'modification_time_format': '%Y%m%d%H%M.%S', 'access_time_format': '%Y%m%d%H%M.%S', 'unsafe_writes': False, '_original_basename': None, '_diff_peek': None, 'src': None, 'modification_time': None, 'access_time': None, 'seuser': None, 'serole': None, 'selevel': None, 'setype': None, 'attributes': None}}, '_ansible_no_log': False, 'changed': False} TASK [Get final state of services] ********************************************* task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Wednesday 25 September 2024 06:41:34 -0400 (0:00:00.020) 0:00:06.945 *** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { 
"name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": 
"sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": 
"systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "valkey-sentinel.service": { "name": "valkey-sentinel.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "valkey.service": { "name": "valkey.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Wednesday 25 September 2024 06:41:35 -0400 (0:00:01.536) 0:00:08.482 *** ok: [managed-node3] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "item": "pmcd", "name": "pmcd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Wed 2024-09-25 06:40:26 EDT", "ActiveEnterTimestampMonotonic": "367524092", "ActiveExitTimestamp": "Wed 2024-09-25 06:40:25 EDT", "ActiveExitTimestampMonotonic": "366643884", "ActiveState": "active", "After": "sysinit.target avahi-daemon.service system.slice basic.target network-online.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Wed 2024-09-25 06:40:26 EDT", "AssertTimestampMonotonic": "367244916", "Before": "pmlogger.service zabbix-agent.service pmie.service pmproxy.service multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "2654966000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2024-09-25 06:40:26 EDT", "ConditionTimestampMonotonic": "367244913", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlGroupId": "4811", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", 
"DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmcd(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "33333", "ExecMainStartTimestamp": "Wed 2024-09-25 06:40:26 EDT", "ExecMainStartTimestampMonotonic": "367524057", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Wed 2024-09-25 06:40:26 EDT", "InactiveEnterTimestampMonotonic": "367244303", "InactiveExitTimestamp": "Wed 2024-09-25 06:40:26 EDT", "InactiveExitTimestampMonotonic": "367249489", "InvocationID": "ec050dd52f1d47d880e32123b1977c5f", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33333", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "56410112", "MemoryDenyWriteExecute": 
"no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmcd.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2024-09-25 06:40:26 EDT", "StateChangeTimestampMonotonic": "367524092", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "10", "TasksMax": "22342", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmlogger.service multi-user.target pmie.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "item": "pmlogger", "name": "pmlogger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Wed 2024-09-25 06:41:22 EDT", "ActiveEnterTimestampMonotonic": "423512573", "ActiveExitTimestamp": "Wed 2024-09-25 06:41:21 EDT", "ActiveExitTimestampMonotonic": "422549456", "ActiveState": "active", "After": "systemd-journald.socket pmcd.service sysinit.target network-online.target basic.target system.slice pcp-reboot-init.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Wed 2024-09-25 06:41:21 
EDT", "AssertTimestampMonotonic": "422617498", "Before": "pmlogger_check.timer shutdown.target pmlogger_farm.service pmlogger_daily.timer multi-user.target", "BindsTo": "pmlogger_farm.service pmlogger_daily.timer pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "1489417000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2024-09-25 06:41:21 EDT", "ConditionTimestampMonotonic": "422617494", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlGroupId": "5070", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "\"man:pmlogger(1)\"", "DynamicUser": "no", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "46239", "ExecMainStartTimestamp": "Wed 2024-09-25 06:41:22 EDT", "ExecMainStartTimestampMonotonic": "423512536", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", 
"IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Wed 2024-09-25 06:41:21 EDT", "InactiveEnterTimestampMonotonic": "422617030", "InactiveExitTimestamp": "Wed 2024-09-25 06:41:21 EDT", "InactiveExitTimestampMonotonic": "422624369", "InvocationID": "541b287c743a465cb4bc26ba1ebe3736", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "46239", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "3309568", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmlogger.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", 
"StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2024-09-25 06:41:22 EDT", "StateChangeTimestampMonotonic": "423512573", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22342", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "item": "pmie", "name": "pmie", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Wed 2024-09-25 06:40:36 EDT", "ActiveEnterTimestampMonotonic": "377225462", "ActiveExitTimestamp": "Wed 2024-09-25 06:40:35 EDT", "ActiveExitTimestampMonotonic": "376796446", "ActiveState": "active", "After": "basic.target sysinit.target system.slice network-online.target pmcd.service systemd-journald.socket pcp-reboot-init.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Wed 2024-09-25 06:40:36 EDT", "AssertTimestampMonotonic": "377023121", "Before": "shutdown.target pmie_daily.timer pmie_farm.service multi-user.target pmie_check.timer", "BindsTo": "pmie_daily.timer pmie_farm.service pmie_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "277464000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2024-09-25 06:40:36 EDT", "ConditionTimestampMonotonic": "377023118", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlGroupId": 
"4848", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "\"man:pmie(1)\"", "DynamicUser": "no", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "36714", "ExecMainStartTimestamp": "Wed 2024-09-25 06:40:36 EDT", "ExecMainStartTimestampMonotonic": "377225429", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Wed 2024-09-25 06:40:36 EDT", "InactiveEnterTimestampMonotonic": "377022602", "InactiveExitTimestamp": "Wed 2024-09-25 06:40:36 EDT", "InactiveExitTimestampMonotonic": "377027391", "InvocationID": "2d4fb7c4593f481389ed704049dc9040", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "36714", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", 
"ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "1695744", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmie.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2024-09-25 06:40:36 EDT", "StateChangeTimestampMonotonic": "377225462", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22342", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "item": "pmproxy", "name": "pmproxy", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice pcp-reboot-init.service network-online.target basic.target redis.service sysinit.target avahi-daemon.service systemd-journald.socket pmcd.service", "AllowIsolate": 
"no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmproxy(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": 
"819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2024-09-25 06:38:58 EDT", "StateChangeTimestampMonotonic": "279193826", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22342", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": 
"disabled", "User": "pcp", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } skipping: [managed-node3] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "initial_state.ansible_facts.services[item + '.service']['status'] != 'not-found'", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in initial_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } TASK [Stop firewall] *********************************************************** task path: /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29 Wednesday 25 September 2024 06:41:37 -0400 (0:00:02.142) 0:00:10.624 *** ok: [managed-node3] => { "changed": false, "name": "firewalld", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target dbus.socket dbus-broker.service basic.target system.slice polkit.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service shutdown.target ipset.service ip6tables.service iptables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", 
"ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", 
"PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22342", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } PLAY RECAP ********************************************************************* managed-node3 : ok=15 changed=1 unreachable=0 failed=1 skipped=9 rescued=1 ignored=0 Wednesday 25 September 2024 06:41:38 -0400 (0:00:00.524) 0:00:11.148 *** =============================================================================== fedora.linux_system_roles.private_metrics_subrole_keyserver : Install key server packages --- 2.54s /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:31 Restore state of services ----------------------------------------------- 2.14s /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Get initial state of services ------------------------------------------- 1.59s /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Get final state of services --------------------------------------------- 1.54s /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Gathering Facts --------------------------------------------------------- 1.01s /tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:9 Stop firewall ----------------------------------------------------------- 0.52s 
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29
Collect logs ------------------------------------------------------------ 0.47s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2
fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server configuration directory exists --- 0.43s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:37
fedora.linux_system_roles.private_metrics_subrole_keyserver : Check if system is ostree --- 0.40s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:22
fedora.linux_system_roles.private_metrics_subrole_keyserver : Set platform/version specific variables --- 0.07s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:4
Setup metric querying service. ------------------------------------------ 0.04s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75
Stop test --------------------------------------------------------------- 0.04s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:18
Run the role ------------------------------------------------------------ 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:28
fedora.linux_system_roles.metrics : Setup metrics access for roles ------ 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28
Configure Elasticsearch metrics ----------------------------------------- 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35
Handle failure case ----------------------------------------------------- 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:49
Configure SQL Server metrics. ------------------------------------------- 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50
Configure Postfix metrics. ---------------------------------------------- 0.03s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58
fedora.linux_system_roles.private_metrics_subrole_keyserver : Set flag to indicate system is ostree --- 0.02s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:27
Include vault variables ------------------------------------------------- 0.02s
/tmp/collections-PvV/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:5
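
The skipped items and the firewalld shutdown earlier in this run come from the test's clean-up pass: it records the service state before the run, records it again afterwards, and only touches units that actually exist in those snapshots. Below is a minimal sketch of that pattern in Ansible YAML, under stated assumptions: the task names, the initial_state/final_state variables, and the per-item conditions are taken from the log output above, while the restore logic and the exact task layout are illustrative and may differ from the collection's real tasks.

- name: Get initial state of services
  ansible.builtin.service_facts:
  register: initial_state

# ... the role under test runs here ...

- name: Get final state of services
  ansible.builtin.service_facts:
  register: final_state

- name: Restore state of services
  ansible.builtin.service:
    name: "{{ item }}"
    # Assumption: bring the unit back to its pre-test state (started if it was
    # running before the test, stopped otherwise).
    state: "{{ 'started' if initial_state.ansible_facts.services[item + '.service'].state == 'running' else 'stopped' }}"
  loop:
    - pmproxy
    - redis
    - valkey
    - grafana-server
  when:
    - item + '.service' in initial_state.ansible_facts.services
    - item + '.service' in final_state.ansible_facts.services
    - initial_state.ansible_facts.services[item + '.service']['status'] != 'not-found'

- name: Stop firewall
  ansible.builtin.service:
    name: firewalld
    state: stopped
  when: "'firewalld.service' in final_state.ansible_facts.services"

Any loop item whose conditions evaluate to false is reported as "skipping" together with the first failing condition, which is what the redis, valkey and grafana-server entries show; firewalld is stopped explicitly, which produces the long systemd property dump under the "Stop firewall" task result.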