ansible-playbook [core 2.16.13]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-h0K
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_verify_fullstack.yml *******************************************
2 plays in /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:5
Tuesday 19 November 2024 14:39:00 -0500 (0:00:00.013) 0:00:00.013 ******
ok: [managed-node3] => { "ansible_facts": { "pcptest_pw": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n" } }, "ansible_included_var_files": [ "/tmp/metrics-fpf/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Test the full PCP and Grafana stack] *************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:9
Tuesday 19 November 2024 14:39:00 -0500 (0:00:00.025) 0:00:00.039 ******
ok: [managed-node3]

TASK [Stop test] ***************************************************************
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:18
Tuesday 19 November 2024 14:39:01 -0500 (0:00:01.361) 0:00:01.401 ******
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }
MSG:
end_host conditional evaluated to false, continuing execution for managed-node3

TASK [Get initial state of services] *******************************************
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3
Tuesday
19 November 2024 14:39:01 -0500 (0:00:00.054) 0:00:01.455 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": 
"enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grafana-server.service": { "name": "grafana-server.service", "source": "systemd", "state": "running", "status": "enabled" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": 
"initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mariadb.service": { "name": "mariadb.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "mysqld.service": { "name": "mysqld.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", 
"state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily_report.service": { "name": "pmlogger_daily_report.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "running", "status": "enabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postgresql.service": { "name": "postgresql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "power-profiles-daemon.service": { "name": 
"power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis-sentinel.service": { "name": "redis-sentinel.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "running", "status": "enabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": 
"indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": 
"systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:28 Tuesday 19 November 2024 14:39:03 -0500 (0:00:01.932) 0:00:03.388 ****** TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.042) 0:00:03.430 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: 
/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.024) 0:00:03.455 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.475 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.495 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.027) 0:00:03.523 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.025) 0:00:03.548 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.027) 0:00:03.576 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] ******************************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.597 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.617 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] 
************************************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.638 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric querying service.] ****************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.020) 0:00:03.659 ****** TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Set platform/version specific variables] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:4 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.048) 0:00:03.708 ****** ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml) => { "ansible_facts": { "__keyserver_conf_link": "/etc" }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml) => { "ansible_facts": { "__keyserver_conf_file": "redis.conf", "__keyserver_conf_path": "/etc/redis", 
"__keyserver_loaded_modules": [], "__keyserver_name": "redis", "__keyserver_packages": [ "redis" ], "__keyserver_packages_extra": [] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8_x86_64.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml) => { "ansible_facts": { "__keyserver_conf_file": "redis.conf", "__keyserver_conf_path": "/etc/redis", "__keyserver_loaded_modules": [], "__keyserver_name": "redis", "__keyserver_packages": [ "redis" ], "__keyserver_packages_extra": [] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8.yml" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8_x86_64.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_8_x86_64.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Check if system is ostree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:22 Tuesday 19 November 2024 14:39:03 -0500 (0:00:00.079) 0:00:03.787 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Set flag to indicate system is ostree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:27 Tuesday 19 November 2024 14:39:04 -0500 (0:00:00.621) 0:00:04.409 ****** ok: [managed-node3] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Install key server packages] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:31 Tuesday 19 November 2024 14:39:04 -0500 (0:00:00.029) 0:00:04.438 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server configuration directory exists] *** task path: 
/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:37 Tuesday 19 November 2024 14:39:07 -0500 (0:00:03.186) 0:00:07.624 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0750", "owner": "redis", "path": "/etc/redis", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 24, "state": "directory", "uid": 993 } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:46 Tuesday 19 November 2024 14:39:08 -0500 (0:00:00.662) 0:00:08.287 ****** ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/templates/CentOS_8_keyserver.conf.j2) => { "ansible_loop_var": "item", "changed": false, "checksum": "06d89d7886a9a4126fb9baddfd6c1c2e3ab5271a", "dest": "/etc/redis/redis.conf", "gid": 0, "group": "root", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/templates/CentOS_8_keyserver.conf.j2", "mode": "0640", "owner": "redis", "path": "/etc/redis/redis.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 61569, "state": "file", "uid": 993 } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server configuration symlink exists] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:60 Tuesday 19 November 2024 14:39:09 -0500 (0:00:00.907) 0:00:09.194 ****** ok: [managed-node3] => { "changed": false, "dest": "/etc/redis.conf", "gid": 0, "group": "root", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 21, "src": "/etc/redis/redis.conf", "state": "link", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server service is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:67 Tuesday 19 November 2024 14:39:09 -0500 (0:00:00.559) 0:00:09.754 ****** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "redis", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:52 EST", "ActiveEnterTimestampMonotonic": "525349390", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:52 EST", "ActiveExitTimestampMonotonic": "525309967", "ActiveState": "active", "After": "system.slice sysinit.target -.mount systemd-journald.socket network.target basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:52 EST", "AssertTimestampMonotonic": "525338420", "Before": "pmproxy.service multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override 
cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:52 EST", "ConditionTimestampMonotonic": "525338419", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/redis.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Redis persistent key-value database", "DevicePolicy": "auto", "DropInPaths": "/etc/systemd/system/redis.service.d/limit.conf", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "33658", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:52 EST", "ExecMainStartTimestampMonotonic": "525339503", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/redis-server ; argv[]=/usr/bin/redis-server /etc/redis.conf --supervised systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:52 EST] ; stop_time=[n/a] ; pid=33658 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/redis-shutdown ; argv[]=/usr/libexec/redis-shutdown ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:52 EST] ; stop_time=[Tue 2024-11-19 14:38:52 EST] ; pid=33643 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/redis.service", "FreezerState": "running", "GID": "990", "Group": "redis", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "redis.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:52 EST", "InactiveEnterTimestampMonotonic": "525337426", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:52 EST", "InactiveExitTimestampMonotonic": "525339544", "InvocationID": "2c8bc08d3272449486f3a6f6b38f9b49", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "10240", "LimitNOFILESoft": "10240", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", 
"LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33658", "MemoryAccounting": "yes", "MemoryCurrent": "9629696", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "redis.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/redis", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectory": "redis", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:52 EST", "StateChangeTimestampMonotonic": "525349390", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "redis", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:52 EST", "WatchdogTimestampMonotonic": "525349388", "WatchdogUSec": "0" } } TASK [Setup metric collection service.] 
**************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:81 Tuesday 19 November 2024 14:39:10 -0500 (0:00:01.076) 0:00:10.830 ****** TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set platform/version specific variables] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:4 Tuesday 19 November 2024 14:39:10 -0500 (0:00:00.063) 0:00:10.894 ****** ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml) => { "ansible_facts": { "__pcp_pmcd_defaults_path": "/etc/sysconfig/pmcd", "__pcp_pmlogger_defaults_path": "/etc/sysconfig/pmlogger", "__pcp_pmlogger_timers_path": "/etc/sysconfig/pmlogger_timers", "__pcp_pmproxy_defaults_path": "/etc/sysconfig/pmproxy" }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" } TASK 
[fedora.linux_system_roles.private_metrics_subrole_pcp : Check if system is ostree] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:18
Tuesday 19 November 2024 14:39:10 -0500 (0:00:00.058) 0:00:10.952 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set flag to indicate system is ostree] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:23
Tuesday 19 November 2024 14:39:11 -0500 (0:00:00.027) 0:00:10.980 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27
Tuesday 19 November 2024 14:39:11 -0500 (0:00:00.024) 0:00:11.004 ******
ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] }
MSG:
Nothing to do

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33
Tuesday 19 November 2024 14:39:14 -0500 (0:00:03.010) 0:00:14.015 ******
ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] }
MSG:
Nothing to do

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmcd] ****
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:42
Tuesday 19 November 2024 14:39:17 -0500 (0:00:03.032) 0:00:17.048 ******
included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml for managed-node3

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : List optional metric collection agents to be enabled] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:4
Tuesday 19 November 2024 14:39:17 -0500 (0:00:00.075) 0:00:17.123 ******
skipping: [managed-node3] => { "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Extract metric collection configuration file content] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:9
Tuesday 19 November 2024 14:39:17 -0500 (0:00:00.039) 0:00:17.163 ******
ok: [managed-node3] => { "changed": false, "cmd": [ "cat", "/etc/pcp/pmcd/pmcd.conf" ], "delta": "0:00:00.002850", "end": "2024-11-19 14:39:17.754679", "rc": 0, "start": "2024-11-19 14:39:17.751829" }
STDOUT:
#
# Name Id IPC IPC Params File/Cmd
# Performance Metrics Domain Specifications
# This file is automatically generated during the build
root 1 pipe binary /var/lib/pcp/pmdas/root/pmdaroot
pmcd 2 dso pmcd_init /var/lib/pcp/pmdas/pmcd/pmda_pmcd.so
proc 3 pipe binary /var/lib/pcp/pmdas/proc/pmdaproc -d 3
pmproxy 4 dso pmproxy_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so
xfs 11 pipe binary /var/lib/pcp/pmdas/xfs/pmdaxfs -d 11
linux 60 pipe binary /var/lib/pcp/pmdas/linux/pmdalinux
nfsclient 62 pipe binary python3 /var/lib/pcp/pmdas/nfsclient/pmdanfsclient.python
mmv 70 dso mmv_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so
kvm 95 pipe binary /var/lib/pcp/pmdas/kvm/pmdakvm -d 95
jbd2 122 dso jbd2_init /var/lib/pcp/pmdas/jbd2/pmda_jbd2.so
dm 129 pipe binary /var/lib/pcp/pmdas/dm/pmdadm -d 129
openmetrics 144 pipe binary notready python3 /var/lib/pcp/pmdas/openmetrics/pmdaopenmetrics.python
bpftrace 151 pipe binary python3 /var/lib/pcp/pmdas/bpftrace/pmdabpftrace.python
[access]
disallow ".*" : store;
disallow ":*" : store;
allow "local:*" : all;

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure optional metric collection agents are enabled] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:14
Tuesday 19 November 2024 14:39:17 -0500 (0:00:00.637) 0:00:17.800 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure explicit metric label path exists] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:23
Tuesday 19 November 2024 14:39:17 -0500 (0:00:00.021) 0:00:17.822 ******
ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels", "secontext": "system_u:object_r:etc_t:s0", "size": 45, "state": "directory", "uid": 0 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure implicit metric label path exists] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:31
Tuesday 19 November 2024 14:39:18 -0500 (0:00:00.518) 0:00:18.341 ******
ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels/optional", "secontext": "system_u:object_r:etc_t:s0", "size": 29, "state": "directory", "uid": 0 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39
Tuesday 19 November 2024 14:39:18 -0500 (0:00:00.575) 0:00:18.917 ******
ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any implicit metric labels are configured] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:46
Tuesday 19 November 2024 14:39:19 -0500 (0:00:00.931) 0:00:19.848 ******
ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/optional/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/optional/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53
Tuesday 19 November 2024 14:39:20 -0500 (0:00:00.871) 0:00:20.719 ******
ok: [managed-node3] => { "changed": false, "checksum": "7518789c091387cd9c322e1a8fa8aad21d4efbd3", "dest": "/etc/sysconfig/pmcd", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmcd", "secontext": "system_u:object_r:etc_t:s0", "size": 1627, "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60
Tuesday 19 November 2024 14:39:21 -0500 (0:00:00.886) 0:00:21.606 ******
ok: [managed-node3] => (item={'user': 'metrics', 'sasluser': 'metrics', 'saslpassword': 'metrics'}) => { "ansible_loop_var": "item", "append": false, "changed": false, "comment": "", "group": 989, "home": "/home/metrics", "item": { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" }, "move_home": false, "name": "metrics", "shell": "/bin/bash", "state": "present", "uid": 992 }

TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector SASL accounts are configured] ***
task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:68
Tuesday 19 November 2024 14:39:22 -0500 (0:00:00.787) 0:00:22.394 ******
ok: [managed-node3] => (item={'user': 'metrics', 'sasluser': 'metrics', 'saslpassword': 'metrics'}) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -eu\nif set -o | grep -q pipefail; then\n set -o pipefail # pipefail not supported on debian, some ubuntu\nfi\nif ! 
sasldblistusers2 -f \"/etc/pcp/passwd.db\" | grep -q \"^metrics@\"; then\n echo \"Creating new metrics user in /etc/pcp/passwd.db\"\n echo \"metrics\" | saslpasswd2 -a pmcd \"metrics\"\n chown root:pcp \"/etc/pcp/passwd.db\"\n chmod 640 \"/etc/pcp/passwd.db\"\nfi\n", "delta": "0:00:00.008613", "end": "2024-11-19 14:39:22.870390", "item": { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" }, "rc": 0, "start": "2024-11-19 14:39:22.861777" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86 Tuesday 19 November 2024 14:39:22 -0500 (0:00:00.529) 0:00:22.923 ****** ok: [managed-node3] => { "changed": false, "checksum": "615d2de55ab86108da0c7e6b64988fecb4169771", "dest": "/etc/sasl2/pmcd.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sasl2/pmcd.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 998, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmcd restart if needed] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:94 Tuesday 19 November 2024 14:39:23 -0500 (0:00:00.881) 0:00:23.805 ****** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmcd": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Report performance metric collector restart state] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:99 Tuesday 19 November 2024 14:39:23 -0500 (0:00:00.045) 0:00:23.850 ****** ok: [managed-node3] => {} MSG: ['optional_agents: False', 'explicit_labels: False', 'implicit_labels: False', 'defaults_config: False', 'authentication: False', 'restart_pmcd: False'] TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110 Tuesday 19 November 2024 14:39:23 -0500 (0:00:00.041) 0:00:23.892 ****** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmcd", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:37:39 EST", "ActiveEnterTimestampMonotonic": "452428617", "ActiveExitTimestamp": "Tue 2024-11-19 14:37:38 EST", "ActiveExitTimestampMonotonic": "451383313", "ActiveState": "active", "After": "avahi-daemon.service system.slice basic.target systemd-journald.socket network-online.target sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:37:39 EST", "AssertTimestampMonotonic": "452118064", "Before": "shutdown.target pmlogger.service multi-user.target pmproxy.service zabbix-agent.service pmie.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", 
"CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:37:39 EST", "ConditionTimestampMonotonic": "452118063", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmcd(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "21673", "ExecMainStartTimestamp": "Tue 2024-11-19 14:37:39 EST", "ExecMainStartTimestampMonotonic": "452428598", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:37:39 EST", "InactiveEnterTimestampMonotonic": "452116883", "InactiveExitTimestamp": "Tue 2024-11-19 14:37:39 EST", "InactiveExitTimestampMonotonic": "452119123", "InvocationID": "ca073c479f2a4f3297bd172d338c087c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", 
"LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "21673", "MemoryAccounting": "yes", "MemoryCurrent": "165691392", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmcd.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:37:39 EST", "StateChangeTimestampMonotonic": "452428617", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "22", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmlogger.service multi-user.target pmie.service", "WatchdogTimestamp": "Tue 2024-11-19 14:37:39 EST", "WatchdogTimestampMonotonic": "452428613", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is restarted and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:117 Tuesday 19 November 2024 14:39:24 -0500 (0:00:00.689) 0:00:24.581 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmcd | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmie] **** task path: 
/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:45 Tuesday 19 November 2024 14:39:24 -0500 (0:00:00.025) 0:00:24.607 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 Tuesday 19 November 2024 14:39:24 -0500 (0:00:00.075) 0:00:24.682 ****** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/network", "secontext": "system_u:object_r:etc_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/power", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf", "secontext": "system_u:object_r:etc_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/filesys", "secontext": "system_u:object_r:etc_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 Tuesday 19 November 2024 14:39:26 -0500 (0:00:02.093) 0:00:26.776 ****** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/network", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/power", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/zeroconf", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/filesys", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts] *** task path: 
/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 Tuesday 19 November 2024 14:39:28 -0500 (0:00:02.021) 0:00:28.798 ****** ok: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "checksum": "608d8a6ac6ee33bb86b77d28ba24fbcd378db43d", "dest": "/etc/pcp/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcplistenoverflows", "secontext": "system_u:object_r:etc_t:s0", "size": 971, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "checksum": "3256a5c2e8d07a20d8e97a08c0ab163252b0beae", "dest": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "secontext": "system_u:object_r:etc_t:s0", "size": 1131, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "checksum": "37b2bd7f2430bd9678ab078c5e69a53bea556524", "dest": "/etc/pcp/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldrops", "secontext": "system_u:object_r:etc_t:s0", "size": 1129, "state": "file", "uid": 0 } ok: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "checksum": "1d53d6182709617c8f633339652d8d9e75f3b603", "dest": "/etc/pcp/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/power/thermal_throttle", "secontext": "system_u:object_r:etc_t:s0", "size": 1153, "state": "file", "uid": 0 } ok: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "checksum": "65169db16dcaa224c211373001adc3addf1031c4", "dest": "/etc/pcp/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf/all_threads", "secontext": "system_u:object_r:etc_t:s0", "size": 840, "state": "file", "uid": 0 } ok: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "checksum": "cd5d85dfb8eebd7d9737d56e78bd969dafa3999c", "dest": "/etc/pcp/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/filesys/vfs_files", "secontext": "system_u:object_r:etc_t:s0", "size": 969, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance rule actions are installed for targeted hosts] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:34 Tuesday 19 November 2024 14:39:34 -0500 (0:00:05.176) 0:00:33.974 ****** ok: [managed-node3] => { "ansible_facts": { "local_pmie": "default" }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global pmie webhook action is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:38 Tuesday 19 November 2024 14:39:34 -0500 
(0:00:00.022) 0:00:33.997 ****** skipping: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook action] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:50 Tuesday 19 November 2024 14:39:34 -0500 (0:00:00.032) 0:00:34.029 ****** skipping: [managed-node3] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'pcp_pmie_endpoint | length > 0', 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global webhook endpoint is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:63 Tuesday 19 November 2024 14:39:34 -0500 (0:00:00.026) 0:00:34.056 ****** ok: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "backup": "", "changed": false, "found": 0, "item": "default" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook endpoint] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:73 Tuesday 19 November 2024 14:39:34 -0500 (0:00:00.643) 0:00:34.699 ****** skipping: [managed-node3] => (item={'changed': False, 'found': 0, 'msg': '', 'backup': '', 'diff': [{'before': '', 'after': '', 'before_header': '/var/lib/pcp/config/pmie/config.default (content)', 'after_header': '/var/lib/pcp/config/pmie/config.default (content)'}, {'before_header': '/var/lib/pcp/config/pmie/config.default (file attributes)', 'after_header': '/var/lib/pcp/config/pmie/config.default (file attributes)'}], 'invocation': {'module_args': {'state': 'absent', 'path': '/var/lib/pcp/config/pmie/config.default', 'regexp': '//.*global webhook_endpoint = ""', 'backrefs': False, 'create': False, 'backup': False, 'firstmatch': False, 'unsafe_writes': False, 'search_string': None, 'line': None, 'insertafter': None, 'insertbefore': None, 'validate': None, 'mode': None, 'owner': None, 'group': None, 'seuser': None, 'serole': None, 'selevel': None, 'setype': None, 'attributes': None}}, 'failed': False, 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "backup": "", "changed": false, "diff": [ { "after": "", "after_header": "/var/lib/pcp/config/pmie/config.default (content)", "before": "", "before_header": "/var/lib/pcp/config/pmie/config.default (content)" }, { "after_header": "/var/lib/pcp/config/pmie/config.default (file attributes)", "before_header": "/var/lib/pcp/config/pmie/config.default (file attributes)" } ], "failed": false, "found": 0, "invocation": { 
"module_args": { "attributes": null, "backrefs": false, "backup": false, "create": false, "firstmatch": false, "group": null, "insertafter": null, "insertbefore": null, "line": null, "mode": null, "owner": null, "path": "/var/lib/pcp/config/pmie/config.default", "regexp": "//.*global webhook_endpoint = \"\"", "search_string": null, "selevel": null, "serole": null, "setype": null, "seuser": null, "state": "absent", "unsafe_writes": false, "validate": null } }, "item": "default", "msg": "" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 Tuesday 19 November 2024 14:39:34 -0500 (0:00:00.050) 0:00:34.750 ****** changed: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 44, "src": "/etc/pcp/pmieconf/network/tcplistenoverflows", "state": "link", "uid": 0 } changed: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 43, "src": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "state": "link", "uid": 0 } changed: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 39, "src": "/etc/pcp/pmieconf/network/tcpqfulldrops", "state": "link", "uid": 0 } changed: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 40, "src": "/etc/pcp/pmieconf/power/thermal_throttle", "state": "link", "uid": 0 } changed: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 38, "src": "/etc/pcp/pmieconf/zeroconf/all_threads", "state": "link", "uid": 0 } ok: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 35, "src": "/etc/pcp/pmieconf/filesys/vfs_files", "state": "link", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (with control.d)] *** task 
path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:95 Tuesday 19 November 2024 14:39:37 -0500 (0:00:03.199) 0:00:37.949 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (single control)] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:106 Tuesday 19 November 2024 14:39:38 -0500 (0:00:00.021) 0:00:37.971 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmie restart if needed] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:116 Tuesday 19 November 2024 14:39:38 -0500 (0:00:00.022) 0:00:37.993 ****** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmie": true }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:120 Tuesday 19 November 2024 14:39:38 -0500 (0:00:00.031) 0:00:38.025 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __pcp_restart_pmie | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127 Tuesday 19 November 2024 14:39:38 -0500 (0:00:00.019) 0:00:38.045 ****** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "pmie", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:37:56 EST", "ActiveEnterTimestampMonotonic": "469200402", "ActiveExitTimestamp": "Tue 2024-11-19 14:37:55 EST", "ActiveExitTimestampMonotonic": "468750803", "ActiveState": "active", "After": "system.slice pmcd.service systemd-journald.socket network-online.target sysinit.target basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:37:55 EST", "AssertTimestampMonotonic": "468981491", "Before": "multi-user.target shutdown.target pmie_farm.service pmie_check.timer pmie_daily.timer", "BindsTo": "pmie_farm.service pmie_check.timer pmie_daily.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin 
cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:37:55 EST", "ConditionTimestampMonotonic": "468981489", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "man:pmie(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "27816", "ExecMainStartTimestamp": "Tue 2024-11-19 14:37:56 EST", "ExecMainStartTimestampMonotonic": "469200381", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:37:55 EST", "InactiveEnterTimestampMonotonic": "468980412", "InactiveExitTimestamp": "Tue 2024-11-19 14:37:55 EST", "InactiveExitTimestampMonotonic": "468982541", "InvocationID": "0112c5fd7d534d32a57fd4d649143d9f", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", 
"LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "27816", "MemoryAccounting": "yes", "MemoryCurrent": "1626112", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmie.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:37:56 EST", "StateChangeTimestampMonotonic": "469200402", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2024-11-19 14:37:56 EST", "WatchdogTimestampMonotonic": "469200399", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmlogger] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:48 Tuesday 19 November 2024 14:39:39 -0500 (0:00:01.220) 0:00:39.265 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure metric log location is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:4 Tuesday 19 November 2024 14:39:39 -0500 (0:00:00.061) 0:00:39.326 ****** ok: [managed-node3] => { "backup": "", "changed": false } 
TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12 Tuesday 19 November 2024 14:39:39 -0500 (0:00:00.532) 0:00:39.859 ****** ok: [managed-node3] => { "changed": false, "checksum": "67bc35973101c614e92b1990f8bebfffc39fe498", "dest": "/etc/sysconfig/pmlogger", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger", "secontext": "system_u:object_r:etc_t:s0", "size": 1180, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19 Tuesday 19 November 2024 14:39:40 -0500 (0:00:00.877) 0:00:40.736 ****** ok: [managed-node3] => { "changed": false, "checksum": "df7bd3b5b6f1de3af164aab81441c7251a13a298", "dest": "/etc/sysconfig/pmlogger_timers", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger_timers", "secontext": "system_u:object_r:etc_t:s0", "size": 988, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (with control.d)] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:27 Tuesday 19 November 2024 14:39:41 -0500 (0:00:00.859) 0:00:41.595 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (single control)] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:39 Tuesday 19 November 2024 14:39:41 -0500 (0:00:00.021) 0:00:41.617 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmlogger restart if needed] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:49 Tuesday 19 November 2024 14:39:41 -0500 (0:00:00.029) 0:00:41.646 ****** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmlogger": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:53 Tuesday 19 November 2024 14:39:41 -0500 (0:00:00.041) 0:00:41.688 ****** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmlogger", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:54 EST", "ActiveEnterTimestampMonotonic": "527917875", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "ActiveExitTimestampMonotonic": "526836619", "ActiveState": "active", "After": "systemd-journald.socket pmcd.service system.slice basic.target network-online.target sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", 
"AssertTimestamp": "Tue 2024-11-19 14:38:53 EST", "AssertTimestampMonotonic": "526915609", "Before": "pmlogger_daily.timer pmlogger_farm.service shutdown.target multi-user.target pmlogger_check.timer", "BindsTo": "pmlogger_daily.timer pmlogger_farm.service pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:53 EST", "ConditionTimestampMonotonic": "526915607", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "man:pmlogger(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "34408", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:54 EST", "ExecMainStartTimestampMonotonic": "527917851", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:53 EST] ; stop_time=[n/a] ; pid=34181 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger stop-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:53 EST] ; stop_time=[Tue 2024-11-19 14:38:53 EST] ; pid=34123 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveEnterTimestampMonotonic": "526914183", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveExitTimestampMonotonic": "526916788", 
"InvocationID": "7c1fc5e932644c43bde7be0e813ae809", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "34408", "MemoryAccounting": "yes", "MemoryCurrent": "3366912", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmlogger.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:54 EST", "StateChangeTimestampMonotonic": "527917875", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", 
"Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2024-11-19 14:38:54 EST", "WatchdogTimestampMonotonic": "527917872", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is restarted and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:60 Tuesday 19 November 2024 14:39:42 -0500 (0:00:00.686) 0:00:42.374 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmlogger | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmproxy] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:51 Tuesday 19 November 2024 14:39:42 -0500 (0:00:00.023) 0:00:42.397 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmproxy.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure REST API, proxy and metric log discovery is configured] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmproxy.yml:4 Tuesday 19 November 2024 14:39:42 -0500 (0:00:00.082) 0:00:42.480 ****** ok: [managed-node3] => { "changed": false, "checksum": "fa1a173dfa5b1affbf6767115bdae2ce00e98ecc", "dest": "/etc/sysconfig/pmproxy", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmproxy", "secontext": "system_u:object_r:etc_t:s0", "size": 508, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure REST API, proxy and log discovery is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmproxy.yml:11 Tuesday 19 November 2024 14:39:43 -0500 (0:00:00.888) 0:00:43.369 ****** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmproxy", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "ActiveEnterTimestampMonotonic": "526124272", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "ActiveExitTimestampMonotonic": "526046694", "ActiveState": "active", "After": "network-online.target pmcd.service avahi-daemon.service system.slice redis.service sysinit.target basic.target systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:53 EST", "AssertTimestampMonotonic": "526055244", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid 
cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:53 EST", "ConditionTimestampMonotonic": "526055242", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmproxy.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmproxy(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "33869", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:53 EST", "ExecMainStartTimestampMonotonic": "526056381", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmproxy ; argv[]=/usr/libexec/pcp/lib/pmproxy start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:53 EST] ; stop_time=[n/a] ; pid=33869 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveEnterTimestampMonotonic": "526054276", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveExitTimestampMonotonic": "526056432", "InvocationID": "a3ebdd486bde4426b071790a238545a2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33869", "MemoryAccounting": "yes", "MemoryCurrent": 
"44294144", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:53 EST", "StateChangeTimestampMonotonic": "526124272", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:53 EST", "WatchdogTimestampMonotonic": "526124269", "WatchdogUSec": "0" } } TASK [Setup metric graphing service.] 
****************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:94 Tuesday 19 November 2024 14:39:44 -0500 (0:00:00.710) 0:00:44.079 ****** TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Set platform/version specific variables] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:4 Tuesday 19 November 2024 14:39:44 -0500 (0:00:00.093) 0:00:44.172 ****** ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/default.yml) => { "ansible_facts": { "grafana_metrics_provider": "pcp" }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/RedHat.yml) => { "ansible_facts": { "__grafana_keyserver_datasource_alttype": "performancecopilot-valkey-datasource", "__grafana_keyserver_datasource_name": "Valkey", "__grafana_keyserver_datasource_type": "pcp-valkey-datasource", "__grafana_packages": [ "grafana" ] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml) => { "ansible_facts": { "__grafana_keyserver_datasource_alttype": "performancecopilot-redis-datasource", "__grafana_keyserver_datasource_name": "Redis", "__grafana_keyserver_datasource_type": "pcp-redis-datasource", "__grafana_packages_extra": [] }, "ansible_included_var_files": [ "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml" } ok: [managed-node3] => (item=/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml) => { "ansible_facts": { "__grafana_keyserver_datasource_alttype": "performancecopilot-redis-datasource", "__grafana_keyserver_datasource_name": "Redis", "__grafana_keyserver_datasource_type": "pcp-redis-datasource", "__grafana_packages_extra": [] }, "ansible_included_var_files": [ 
"/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/vars/CentOS_8.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Check if system is ostree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:18 Tuesday 19 November 2024 14:39:44 -0500 (0:00:00.085) 0:00:44.258 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Set flag to indicate system is ostree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:23 Tuesday 19 November 2024 14:39:44 -0500 (0:00:00.029) 0:00:44.287 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Install Grafana packages] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:27 Tuesday 19 November 2024 14:39:44 -0500 (0:00:00.029) 0:00:44.316 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Get package facts now that Grafana is installed] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:33 Tuesday 19 November 2024 14:39:47 -0500 (0:00:03.062) 0:00:47.379 ****** ok: [managed-node3] => { "ansible_facts": { "packages": { "NetworkManager": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager", "release": "15.el8", "source": "rpm", "version": "1.40.16" } ], "NetworkManager-libnm": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-libnm", "release": "15.el8", "source": "rpm", "version": "1.40.16" } ], "NetworkManager-team": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-team", "release": "15.el8", "source": "rpm", "version": "1.40.16" } ], "NetworkManager-tui": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-tui", "release": "15.el8", "source": "rpm", "version": "1.40.16" } ], "acl": [ { "arch": "x86_64", "epoch": null, "name": "acl", "release": "3.el8", "source": "rpm", "version": "2.2.53" } ], "aspell": [ { "arch": "x86_64", "epoch": 12, "name": "aspell", "release": "22.el8", "source": "rpm", "version": "0.60.6.1" } ], "audit": [ { "arch": "x86_64", "epoch": null, "name": "audit", "release": "1.el8", "source": "rpm", "version": "3.1.2" } ], "audit-libs": [ { "arch": "x86_64", "epoch": null, "name": "audit-libs", "release": "1.el8", "source": "rpm", "version": "3.1.2" } ], "authselect": [ { "arch": "x86_64", "epoch": null, "name": "authselect", "release": "2.el8", "source": "rpm", "version": "1.2.6" } ], "authselect-compat": [ { "arch": "x86_64", "epoch": null, "name": "authselect-compat", "release": "2.el8", "source": "rpm", "version": "1.2.6" } ], "authselect-libs": [ { "arch": "x86_64", "epoch": null, "name": "authselect-libs", "release": "2.el8", "source": "rpm", "version": "1.2.6" } ], 
"avahi-libs": [ { "arch": "x86_64", "epoch": null, "name": "avahi-libs", "release": "27.el8", "source": "rpm", "version": "0.7" } ], "basesystem": [ { "arch": "noarch", "epoch": null, "name": "basesystem", "release": "5.el8", "source": "rpm", "version": "11" } ], "bash": [ { "arch": "x86_64", "epoch": null, "name": "bash", "release": "5.el8", "source": "rpm", "version": "4.4.20" } ], "bc": [ { "arch": "x86_64", "epoch": null, "name": "bc", "release": "5.el8", "source": "rpm", "version": "1.07.1" } ], "bcc": [ { "arch": "x86_64", "epoch": null, "name": "bcc", "release": "7.el8", "source": "rpm", "version": "0.25.0" } ], "bcc-tools": [ { "arch": "x86_64", "epoch": null, "name": "bcc-tools", "release": "7.el8", "source": "rpm", "version": "0.25.0" } ], "beakerlib": [ { "arch": "noarch", "epoch": null, "name": "beakerlib", "release": "1.el8bkr", "source": "rpm", "version": "1.29.3" } ], "beakerlib-redhat": [ { "arch": "noarch", "epoch": null, "name": "beakerlib-redhat", "release": "33.el8bkr", "source": "rpm", "version": "1" } ], "bind-export-libs": [ { "arch": "x86_64", "epoch": 32, "name": "bind-export-libs", "release": "13.el8", "source": "rpm", "version": "9.11.36" } ], "binutils": [ { "arch": "x86_64", "epoch": null, "name": "binutils", "release": "123.el8", "source": "rpm", "version": "2.30" } ], "biosdevname": [ { "arch": "x86_64", "epoch": null, "name": "biosdevname", "release": "2.el8", "source": "rpm", "version": "0.7.3" } ], "bison": [ { "arch": "x86_64", "epoch": null, "name": "bison", "release": "10.el8", "source": "rpm", "version": "3.0.4" } ], "boost-atomic": [ { "arch": "x86_64", "epoch": null, "name": "boost-atomic", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-chrono": [ { "arch": "x86_64", "epoch": null, "name": "boost-chrono", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-date-time": [ { "arch": "x86_64", "epoch": null, "name": "boost-date-time", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "boost-filesystem", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-system": [ { "arch": "x86_64", "epoch": null, "name": "boost-system", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-thread": [ { "arch": "x86_64", "epoch": null, "name": "boost-thread", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "boost-timer": [ { "arch": "x86_64", "epoch": null, "name": "boost-timer", "release": "13.el8", "source": "rpm", "version": "1.66.0" } ], "bpftrace": [ { "arch": "x86_64", "epoch": null, "name": "bpftrace", "release": "5.el8", "source": "rpm", "version": "0.16.0" } ], "brotli": [ { "arch": "x86_64", "epoch": null, "name": "brotli", "release": "3.el8", "source": "rpm", "version": "1.0.6" } ], "bzip2-libs": [ { "arch": "x86_64", "epoch": null, "name": "bzip2-libs", "release": "26.el8", "source": "rpm", "version": "1.0.6" } ], "c-ares": [ { "arch": "x86_64", "epoch": null, "name": "c-ares", "release": "11.el8", "source": "rpm", "version": "1.13.0" } ], "ca-certificates": [ { "arch": "noarch", "epoch": null, "name": "ca-certificates", "release": "80.0.el8", "source": "rpm", "version": "2023.2.60_v7.0.306" } ], "centos-gpg-keys": [ { "arch": "noarch", "epoch": 1, "name": "centos-gpg-keys", "release": "6.el8", "source": "rpm", "version": "8" } ], "centos-stream-release": [ { "arch": "noarch", "epoch": null, "name": "centos-stream-release", "release": "1.el8", "source": "rpm", "version": "8.6" } ], 
"centos-stream-repos": [ { "arch": "noarch", "epoch": null, "name": "centos-stream-repos", "release": "6.el8", "source": "rpm", "version": "8" } ], "checkpolicy": [ { "arch": "x86_64", "epoch": null, "name": "checkpolicy", "release": "1.el8", "source": "rpm", "version": "2.9" } ], "chkconfig": [ { "arch": "x86_64", "epoch": null, "name": "chkconfig", "release": "1.el8", "source": "rpm", "version": "1.19.2" } ], "chrony": [ { "arch": "x86_64", "epoch": null, "name": "chrony", "release": "1.el8", "source": "rpm", "version": "4.5" } ], "clang-libs": [ { "arch": "x86_64", "epoch": null, "name": "clang-libs", "release": "1.module_el8+767+9fa966b8", "source": "rpm", "version": "17.0.6" } ], "clang-resource-filesystem": [ { "arch": "noarch", "epoch": null, "name": "clang-resource-filesystem", "release": "1.module_el8+767+9fa966b8", "source": "rpm", "version": "17.0.6" } ], "cloud-init": [ { "arch": "noarch", "epoch": null, "name": "cloud-init", "release": "7.el8.2", "source": "rpm", "version": "23.4" } ], "cloud-utils-growpart": [ { "arch": "noarch", "epoch": null, "name": "cloud-utils-growpart", "release": "0.el8", "source": "rpm", "version": "0.33" } ], "compiler-rt": [ { "arch": "x86_64", "epoch": null, "name": "compiler-rt", "release": "1.module_el8+767+9fa966b8", "source": "rpm", "version": "17.0.6" } ], "coreutils": [ { "arch": "x86_64", "epoch": null, "name": "coreutils", "release": "15.el8", "source": "rpm", "version": "8.30" } ], "coreutils-common": [ { "arch": "x86_64", "epoch": null, "name": "coreutils-common", "release": "15.el8", "source": "rpm", "version": "8.30" } ], "cpio": [ { "arch": "x86_64", "epoch": null, "name": "cpio", "release": "11.el8", "source": "rpm", "version": "2.12" } ], "cpp": [ { "arch": "x86_64", "epoch": null, "name": "cpp", "release": "22.el8", "source": "rpm", "version": "8.5.0" } ], "cracklib": [ { "arch": "x86_64", "epoch": null, "name": "cracklib", "release": "15.el8", "source": "rpm", "version": "2.9.6" } ], "cracklib-dicts": [ { "arch": "x86_64", "epoch": null, "name": "cracklib-dicts", "release": "15.el8", "source": "rpm", "version": "2.9.6" } ], "cronie": [ { "arch": "x86_64", "epoch": null, "name": "cronie", "release": "10.el8", "source": "rpm", "version": "1.5.2" } ], "cronie-anacron": [ { "arch": "x86_64", "epoch": null, "name": "cronie-anacron", "release": "10.el8", "source": "rpm", "version": "1.5.2" } ], "crontabs": [ { "arch": "noarch", "epoch": null, "name": "crontabs", "release": "17.20190603git.el8", "source": "rpm", "version": "1.11" } ], "crypto-policies": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies", "release": "1.git3177e06.el8", "source": "rpm", "version": "20230731" } ], "crypto-policies-scripts": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies-scripts", "release": "1.git3177e06.el8", "source": "rpm", "version": "20230731" } ], "cryptsetup-libs": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup-libs", "release": "7.el8", "source": "rpm", "version": "2.3.7" } ], "curl": [ { "arch": "x86_64", "epoch": null, "name": "curl", "release": "34.el8", "source": "rpm", "version": "7.61.1" } ], "cyrus-sasl-lib": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-lib", "release": "6.el8_5", "source": "rpm", "version": "2.1.27" } ], "cyrus-sasl-scram": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-scram", "release": "6.el8_5", "source": "rpm", "version": "2.1.27" } ], "dbus": [ { "arch": "x86_64", "epoch": 1, "name": "dbus", "release": "26.el8", "source": "rpm", "version": "1.12.8" } ], 
"dbus-common": [ { "arch": "noarch", "epoch": 1, "name": "dbus-common", "release": "26.el8", "source": "rpm", "version": "1.12.8" } ], "dbus-daemon": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-daemon", "release": "26.el8", "source": "rpm", "version": "1.12.8" } ], "dbus-glib": [ { "arch": "x86_64", "epoch": null, "name": "dbus-glib", "release": "2.el8", "source": "rpm", "version": "0.110" } ], "dbus-libs": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-libs", "release": "26.el8", "source": "rpm", "version": "1.12.8" } ], "dbus-tools": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-tools", "release": "26.el8", "source": "rpm", "version": "1.12.8" } ], "device-mapper": [ { "arch": "x86_64", "epoch": 8, "name": "device-mapper", "release": "14.el8", "source": "rpm", "version": "1.02.181" } ], "device-mapper-libs": [ { "arch": "x86_64", "epoch": 8, "name": "device-mapper-libs", "release": "14.el8", "source": "rpm", "version": "1.02.181" } ], "dhcp-client": [ { "arch": "x86_64", "epoch": 12, "name": "dhcp-client", "release": "50.el8", "source": "rpm", "version": "4.3.6" } ], "dhcp-common": [ { "arch": "noarch", "epoch": 12, "name": "dhcp-common", "release": "50.el8", "source": "rpm", "version": "4.3.6" } ], "dhcp-libs": [ { "arch": "x86_64", "epoch": 12, "name": "dhcp-libs", "release": "50.el8", "source": "rpm", "version": "4.3.6" } ], "diffutils": [ { "arch": "x86_64", "epoch": null, "name": "diffutils", "release": "6.el8", "source": "rpm", "version": "3.6" } ], "dmidecode": [ { "arch": "x86_64", "epoch": 1, "name": "dmidecode", "release": "4.el8", "source": "rpm", "version": "3.3" } ], "dnf": [ { "arch": "noarch", "epoch": null, "name": "dnf", "release": "20.el8", "source": "rpm", "version": "4.7.0" } ], "dnf-data": [ { "arch": "noarch", "epoch": null, "name": "dnf-data", "release": "20.el8", "source": "rpm", "version": "4.7.0" } ], "dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "dnf-plugins-core", "release": "25.el8", "source": "rpm", "version": "4.0.21" } ], "dracut": [ { "arch": "x86_64", "epoch": null, "name": "dracut", "release": "233.git20240115.el8", "source": "rpm", "version": "049" } ], "dracut-config-rescue": [ { "arch": "x86_64", "epoch": null, "name": "dracut-config-rescue", "release": "233.git20240115.el8", "source": "rpm", "version": "049" } ], "dracut-network": [ { "arch": "x86_64", "epoch": null, "name": "dracut-network", "release": "233.git20240115.el8", "source": "rpm", "version": "049" } ], "dracut-squash": [ { "arch": "x86_64", "epoch": null, "name": "dracut-squash", "release": "233.git20240115.el8", "source": "rpm", "version": "049" } ], "dyninst": [ { "arch": "x86_64", "epoch": null, "name": "dyninst", "release": "1.el8", "source": "rpm", "version": "12.1.0" } ], "e2fsprogs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs", "release": "5.el8", "source": "rpm", "version": "1.45.6" } ], "e2fsprogs-libs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs-libs", "release": "5.el8", "source": "rpm", "version": "1.45.6" } ], "efivar-libs": [ { "arch": "x86_64", "epoch": null, "name": "efivar-libs", "release": "4.el8", "source": "rpm", "version": "37" } ], "elfutils-debuginfod-client": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-debuginfod-client-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client-devel", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-default-yama-scope": [ { "arch": 
"noarch", "epoch": null, "name": "elfutils-default-yama-scope", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-devel", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-libelf": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-libelf-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf-devel", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "elfutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libs", "release": "2.el8", "source": "rpm", "version": "0.190" } ], "emacs-filesystem": [ { "arch": "noarch", "epoch": 1, "name": "emacs-filesystem", "release": "11.el8", "source": "rpm", "version": "26.1" } ], "environment-modules": [ { "arch": "x86_64", "epoch": null, "name": "environment-modules", "release": "4.el8", "source": "rpm", "version": "4.5.2" } ], "epel-release": [ { "arch": "noarch", "epoch": null, "name": "epel-release", "release": "19.el8", "source": "rpm", "version": "8" } ], "ethtool": [ { "arch": "x86_64", "epoch": 2, "name": "ethtool", "release": "2.el8", "source": "rpm", "version": "5.13" } ], "expat": [ { "arch": "x86_64", "epoch": null, "name": "expat", "release": "13.el8", "source": "rpm", "version": "2.2.5" } ], "file": [ { "arch": "x86_64", "epoch": null, "name": "file", "release": "26.el8", "source": "rpm", "version": "5.33" } ], "file-libs": [ { "arch": "x86_64", "epoch": null, "name": "file-libs", "release": "26.el8", "source": "rpm", "version": "5.33" } ], "filesystem": [ { "arch": "x86_64", "epoch": null, "name": "filesystem", "release": "6.el8", "source": "rpm", "version": "3.8" } ], "findutils": [ { "arch": "x86_64", "epoch": 1, "name": "findutils", "release": "22.el8", "source": "rpm", "version": "4.6.0" } ], "firewalld": [ { "arch": "noarch", "epoch": null, "name": "firewalld", "release": "4.el8", "source": "rpm", "version": "0.9.11" } ], "firewalld-filesystem": [ { "arch": "noarch", "epoch": null, "name": "firewalld-filesystem", "release": "4.el8", "source": "rpm", "version": "0.9.11" } ], "flex": [ { "arch": "x86_64", "epoch": null, "name": "flex", "release": "9.el8", "source": "rpm", "version": "2.6.1" } ], "freetype": [ { "arch": "x86_64", "epoch": null, "name": "freetype", "release": "9.el8", "source": "rpm", "version": "2.9.1" } ], "fuse-libs": [ { "arch": "x86_64", "epoch": null, "name": "fuse-libs", "release": "19.el8", "source": "rpm", "version": "2.9.7" } ], "gawk": [ { "arch": "x86_64", "epoch": null, "name": "gawk", "release": "4.el8", "source": "rpm", "version": "4.2.1" } ], "gcc": [ { "arch": "x86_64", "epoch": null, "name": "gcc", "release": "22.el8", "source": "rpm", "version": "8.5.0" } ], "gcc-toolset-13-binutils": [ { "arch": "x86_64", "epoch": null, "name": "gcc-toolset-13-binutils", "release": "21.el8", "source": "rpm", "version": "2.40" } ], "gcc-toolset-13-binutils-gold": [ { "arch": "x86_64", "epoch": null, "name": "gcc-toolset-13-binutils-gold", "release": "21.el8", "source": "rpm", "version": "2.40" } ], "gcc-toolset-13-gcc": [ { "arch": "x86_64", "epoch": null, "name": "gcc-toolset-13-gcc", "release": "6.2.el8", "source": "rpm", "version": "13.2.1" } ], "gcc-toolset-13-gcc-c++": [ { "arch": "x86_64", "epoch": null, "name": "gcc-toolset-13-gcc-c++", "release": "6.2.el8", "source": "rpm", "version": "13.2.1" } ], "gcc-toolset-13-libstdc++-devel": [ { "arch": "x86_64", "epoch": null, "name": 
"gcc-toolset-13-libstdc++-devel", "release": "6.2.el8", "source": "rpm", "version": "13.2.1" } ], "gcc-toolset-13-runtime": [ { "arch": "x86_64", "epoch": null, "name": "gcc-toolset-13-runtime", "release": "2.el8", "source": "rpm", "version": "13.0" } ], "gdbm": [ { "arch": "x86_64", "epoch": 1, "name": "gdbm", "release": "2.el8", "source": "rpm", "version": "1.18" } ], "gdbm-libs": [ { "arch": "x86_64", "epoch": 1, "name": "gdbm-libs", "release": "2.el8", "source": "rpm", "version": "1.18" } ], "gdisk": [ { "arch": "x86_64", "epoch": null, "name": "gdisk", "release": "11.el8", "source": "rpm", "version": "1.0.3" } ], "geolite2-city": [ { "arch": "noarch", "epoch": null, "name": "geolite2-city", "release": "1.el8", "source": "rpm", "version": "20180605" } ], "geolite2-country": [ { "arch": "noarch", "epoch": null, "name": "geolite2-country", "release": "1.el8", "source": "rpm", "version": "20180605" } ], "gettext": [ { "arch": "x86_64", "epoch": null, "name": "gettext", "release": "17.el8", "source": "rpm", "version": "0.19.8.1" } ], "gettext-libs": [ { "arch": "x86_64", "epoch": null, "name": "gettext-libs", "release": "17.el8", "source": "rpm", "version": "0.19.8.1" } ], "git": [ { "arch": "x86_64", "epoch": null, "name": "git", "release": "1.el8", "source": "rpm", "version": "2.43.0" } ], "git-core": [ { "arch": "x86_64", "epoch": null, "name": "git-core", "release": "1.el8", "source": "rpm", "version": "2.43.0" } ], "git-core-doc": [ { "arch": "noarch", "epoch": null, "name": "git-core-doc", "release": "1.el8", "source": "rpm", "version": "2.43.0" } ], "glib2": [ { "arch": "x86_64", "epoch": null, "name": "glib2", "release": "163.el8", "source": "rpm", "version": "2.56.4" } ], "glibc": [ { "arch": "x86_64", "epoch": null, "name": "glibc", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "glibc-common": [ { "arch": "x86_64", "epoch": null, "name": "glibc-common", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "glibc-devel": [ { "arch": "x86_64", "epoch": null, "name": "glibc-devel", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "glibc-gconv-extra": [ { "arch": "x86_64", "epoch": null, "name": "glibc-gconv-extra", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "glibc-headers": [ { "arch": "x86_64", "epoch": null, "name": "glibc-headers", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "glibc-langpack-en": [ { "arch": "x86_64", "epoch": null, "name": "glibc-langpack-en", "release": "251.el8.2", "source": "rpm", "version": "2.28" } ], "gmp": [ { "arch": "x86_64", "epoch": 1, "name": "gmp", "release": "12.el8", "source": "rpm", "version": "6.1.2" } ], "gnupg2": [ { "arch": "x86_64", "epoch": null, "name": "gnupg2", "release": "3.el8", "source": "rpm", "version": "2.2.20" } ], "gnupg2-smime": [ { "arch": "x86_64", "epoch": null, "name": "gnupg2-smime", "release": "3.el8", "source": "rpm", "version": "2.2.20" } ], "gnutls": [ { "arch": "x86_64", "epoch": null, "name": "gnutls", "release": "8.el8.1", "source": "rpm", "version": "3.6.16" } ], "gobject-introspection": [ { "arch": "x86_64", "epoch": null, "name": "gobject-introspection", "release": "1.el8", "source": "rpm", "version": "1.56.1" } ], "gpg-pubkey": [ { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "5ccc5b19", "source": "rpm", "version": "8483c65d" }, { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "5cf7cefb", "source": "rpm", "version": "2f86d6a1" } ], "gpgme": [ { "arch": "x86_64", "epoch": null, "name": "gpgme", 
"release": "12.el8", "source": "rpm", "version": "1.13.1" } ], "gpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "gpm-libs", "release": "17.el8", "source": "rpm", "version": "1.20.7" } ], "grafana": [ { "arch": "x86_64", "epoch": null, "name": "grafana", "release": "16.el8", "source": "rpm", "version": "9.2.10" } ], "grafana-pcp": [ { "arch": "x86_64", "epoch": null, "name": "grafana-pcp", "release": "2.el8", "source": "rpm", "version": "5.1.1" } ], "grafana-selinux": [ { "arch": "x86_64", "epoch": null, "name": "grafana-selinux", "release": "16.el8", "source": "rpm", "version": "9.2.10" } ], "grep": [ { "arch": "x86_64", "epoch": null, "name": "grep", "release": "6.el8", "source": "rpm", "version": "3.1" } ], "groff-base": [ { "arch": "x86_64", "epoch": null, "name": "groff-base", "release": "18.el8", "source": "rpm", "version": "1.22.3" } ], "grub2-common": [ { "arch": "noarch", "epoch": 1, "name": "grub2-common", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grub2-pc": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-pc", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grub2-pc-modules": [ { "arch": "noarch", "epoch": 1, "name": "grub2-pc-modules", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grub2-tools": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grub2-tools-extra": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-extra", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grub2-tools-minimal": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-minimal", "release": "129.el8", "source": "rpm", "version": "2.02" } ], "grubby": [ { "arch": "x86_64", "epoch": null, "name": "grubby", "release": "48.el8", "source": "rpm", "version": "8.40" } ], "gssproxy": [ { "arch": "x86_64", "epoch": null, "name": "gssproxy", "release": "21.el8", "source": "rpm", "version": "0.8.0" } ], "gzip": [ { "arch": "x86_64", "epoch": null, "name": "gzip", "release": "13.el8", "source": "rpm", "version": "1.9" } ], "hardlink": [ { "arch": "x86_64", "epoch": 1, "name": "hardlink", "release": "6.el8", "source": "rpm", "version": "1.3" } ], "hdparm": [ { "arch": "x86_64", "epoch": null, "name": "hdparm", "release": "4.el8", "source": "rpm", "version": "9.54" } ], "hostname": [ { "arch": "x86_64", "epoch": null, "name": "hostname", "release": "7.el8.0.1", "source": "rpm", "version": "3.20" } ], "hwdata": [ { "arch": "noarch", "epoch": null, "name": "hwdata", "release": "8.22.el8", "source": "rpm", "version": "0.314" } ], "ima-evm-utils": [ { "arch": "x86_64", "epoch": null, "name": "ima-evm-utils", "release": "12.el8", "source": "rpm", "version": "1.3.2" } ], "info": [ { "arch": "x86_64", "epoch": null, "name": "info", "release": "7.el8_5", "source": "rpm", "version": "6.5" } ], "initscripts": [ { "arch": "x86_64", "epoch": null, "name": "initscripts", "release": "1.el8", "source": "rpm", "version": "10.00.18" } ], "ipcalc": [ { "arch": "x86_64", "epoch": null, "name": "ipcalc", "release": "4.el8", "source": "rpm", "version": "0.2.4" } ], "iproute": [ { "arch": "x86_64", "epoch": null, "name": "iproute", "release": "5.el8", "source": "rpm", "version": "6.2.0" } ], "iprutils": [ { "arch": "x86_64", "epoch": null, "name": "iprutils", "release": "1.el8", "source": "rpm", "version": "2.4.19" } ], "ipset": [ { "arch": "x86_64", "epoch": null, "name": "ipset", "release": "1.el8", "source": "rpm", "version": "7.1" } ], "ipset-libs": [ { "arch": "x86_64", "epoch": null, 
"name": "ipset-libs", "release": "1.el8", "source": "rpm", "version": "7.1" } ], "iptables": [ { "arch": "x86_64", "epoch": null, "name": "iptables", "release": "11.el8", "source": "rpm", "version": "1.8.5" } ], "iptables-ebtables": [ { "arch": "x86_64", "epoch": null, "name": "iptables-ebtables", "release": "11.el8", "source": "rpm", "version": "1.8.5" } ], "iptables-libs": [ { "arch": "x86_64", "epoch": null, "name": "iptables-libs", "release": "11.el8", "source": "rpm", "version": "1.8.5" } ], "iputils": [ { "arch": "x86_64", "epoch": null, "name": "iputils", "release": "11.el8", "source": "rpm", "version": "20180629" } ], "irqbalance": [ { "arch": "x86_64", "epoch": 2, "name": "irqbalance", "release": "1.el8", "source": "rpm", "version": "1.9.2" } ], "isl": [ { "arch": "x86_64", "epoch": null, "name": "isl", "release": "6.el8", "source": "rpm", "version": "0.16.1" } ], "iwl100-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl100-firmware", "release": "121.el8.1", "source": "rpm", "version": "39.31.5.1" } ], "iwl1000-firmware": [ { "arch": "noarch", "epoch": 1, "name": "iwl1000-firmware", "release": "121.el8.1", "source": "rpm", "version": "39.31.5.1" } ], "iwl105-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl105-firmware", "release": "121.el8.1", "source": "rpm", "version": "18.168.6.1" } ], "iwl135-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl135-firmware", "release": "121.el8.1", "source": "rpm", "version": "18.168.6.1" } ], "iwl2000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl2000-firmware", "release": "121.el8.1", "source": "rpm", "version": "18.168.6.1" } ], "iwl2030-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl2030-firmware", "release": "121.el8.1", "source": "rpm", "version": "18.168.6.1" } ], "iwl3160-firmware": [ { "arch": "noarch", "epoch": 1, "name": "iwl3160-firmware", "release": "121.el8.1", "source": "rpm", "version": "25.30.13.0" } ], "iwl5000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl5000-firmware", "release": "121.el8.1", "source": "rpm", "version": "8.83.5.1_1" } ], "iwl5150-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl5150-firmware", "release": "121.el8.1", "source": "rpm", "version": "8.24.2.2" } ], "iwl6000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6000-firmware", "release": "121.el8.1", "source": "rpm", "version": "9.221.4.1" } ], "iwl6000g2a-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6000g2a-firmware", "release": "121.el8.1", "source": "rpm", "version": "18.168.6.1" } ], "iwl6050-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6050-firmware", "release": "121.el8.1", "source": "rpm", "version": "41.28.5.1" } ], "iwl7260-firmware": [ { "arch": "noarch", "epoch": 1, "name": "iwl7260-firmware", "release": "121.el8.1", "source": "rpm", "version": "25.30.13.0" } ], "jansson": [ { "arch": "x86_64", "epoch": null, "name": "jansson", "release": "1.el8", "source": "rpm", "version": "2.14" } ], "json-c": [ { "arch": "x86_64", "epoch": null, "name": "json-c", "release": "3.el8", "source": "rpm", "version": "0.13.1" } ], "kbd": [ { "arch": "x86_64", "epoch": null, "name": "kbd", "release": "11.el8", "source": "rpm", "version": "2.0.4" } ], "kbd-legacy": [ { "arch": "noarch", "epoch": null, "name": "kbd-legacy", "release": "11.el8", "source": "rpm", "version": "2.0.4" } ], "kbd-misc": [ { "arch": "noarch", "epoch": null, "name": "kbd-misc", "release": "11.el8", "source": "rpm", "version": "2.0.4" } ], "kernel": [ { "arch": 
"x86_64", "epoch": null, "name": "kernel", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-core": [ { "arch": "x86_64", "epoch": null, "name": "kernel-core", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-devel": [ { "arch": "x86_64", "epoch": null, "name": "kernel-devel", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-headers": [ { "arch": "x86_64", "epoch": null, "name": "kernel-headers", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-modules": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-tools": [ { "arch": "x86_64", "epoch": null, "name": "kernel-tools", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kernel-tools-libs": [ { "arch": "x86_64", "epoch": null, "name": "kernel-tools-libs", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "kexec-tools": [ { "arch": "x86_64", "epoch": null, "name": "kexec-tools", "release": "14.el8", "source": "rpm", "version": "2.0.26" } ], "keyutils": [ { "arch": "x86_64", "epoch": null, "name": "keyutils", "release": "9.el8", "source": "rpm", "version": "1.5.10" } ], "keyutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "keyutils-libs", "release": "9.el8", "source": "rpm", "version": "1.5.10" } ], "keyutils-libs-devel": [ { "arch": "x86_64", "epoch": null, "name": "keyutils-libs-devel", "release": "9.el8", "source": "rpm", "version": "1.5.10" } ], "kmod": [ { "arch": "x86_64", "epoch": null, "name": "kmod", "release": "20.el8", "source": "rpm", "version": "25" } ], "kmod-libs": [ { "arch": "x86_64", "epoch": null, "name": "kmod-libs", "release": "20.el8", "source": "rpm", "version": "25" } ], "kpartx": [ { "arch": "x86_64", "epoch": null, "name": "kpartx", "release": "41.el8", "source": "rpm", "version": "0.8.4" } ], "krb5-devel": [ { "arch": "x86_64", "epoch": null, "name": "krb5-devel", "release": "27.el8", "source": "rpm", "version": "1.18.2" } ], "krb5-libs": [ { "arch": "x86_64", "epoch": null, "name": "krb5-libs", "release": "27.el8", "source": "rpm", "version": "1.18.2" } ], "langpacks-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-en", "release": "12.el8", "source": "rpm", "version": "1.0" } ], "less": [ { "arch": "x86_64", "epoch": null, "name": "less", "release": "3.el8", "source": "rpm", "version": "530" } ], "libacl": [ { "arch": "x86_64", "epoch": null, "name": "libacl", "release": "3.el8", "source": "rpm", "version": "2.2.53" } ], "libarchive": [ { "arch": "x86_64", "epoch": null, "name": "libarchive", "release": "5.el8", "source": "rpm", "version": "3.3.3" } ], "libassuan": [ { "arch": "x86_64", "epoch": null, "name": "libassuan", "release": "3.el8", "source": "rpm", "version": "2.5.1" } ], "libattr": [ { "arch": "x86_64", "epoch": null, "name": "libattr", "release": "3.el8", "source": "rpm", "version": "2.4.48" } ], "libbasicobjects": [ { "arch": "x86_64", "epoch": null, "name": "libbasicobjects", "release": "40.el8", "source": "rpm", "version": "0.1.1" } ], "libblkid": [ { "arch": "x86_64", "epoch": null, "name": "libblkid", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "libbpf": [ { "arch": "x86_64", "epoch": null, "name": "libbpf", "release": "1.el8", "source": "rpm", "version": "0.5.0" } ], "libcap": [ { "arch": "x86_64", "epoch": null, "name": "libcap", "release": "5.el8", "source": "rpm", "version": "2.48" } ], "libcap-ng": [ { "arch": "x86_64", 
"epoch": null, "name": "libcap-ng", "release": "1.el8", "source": "rpm", "version": "0.7.11" } ], "libcollection": [ { "arch": "x86_64", "epoch": null, "name": "libcollection", "release": "40.el8", "source": "rpm", "version": "0.7.0" } ], "libcom_err": [ { "arch": "x86_64", "epoch": null, "name": "libcom_err", "release": "5.el8", "source": "rpm", "version": "1.45.6" } ], "libcom_err-devel": [ { "arch": "x86_64", "epoch": null, "name": "libcom_err-devel", "release": "5.el8", "source": "rpm", "version": "1.45.6" } ], "libcomps": [ { "arch": "x86_64", "epoch": null, "name": "libcomps", "release": "1.el8", "source": "rpm", "version": "0.1.18" } ], "libcroco": [ { "arch": "x86_64", "epoch": null, "name": "libcroco", "release": "4.el8_2.1", "source": "rpm", "version": "0.6.12" } ], "libcurl": [ { "arch": "x86_64", "epoch": null, "name": "libcurl", "release": "34.el8", "source": "rpm", "version": "7.61.1" } ], "libdaemon": [ { "arch": "x86_64", "epoch": null, "name": "libdaemon", "release": "15.el8", "source": "rpm", "version": "0.14" } ], "libdb": [ { "arch": "x86_64", "epoch": null, "name": "libdb", "release": "42.el8_4", "source": "rpm", "version": "5.3.28" } ], "libdb-utils": [ { "arch": "x86_64", "epoch": null, "name": "libdb-utils", "release": "42.el8_4", "source": "rpm", "version": "5.3.28" } ], "libdhash": [ { "arch": "x86_64", "epoch": null, "name": "libdhash", "release": "40.el8", "source": "rpm", "version": "0.5.0" } ], "libdnf": [ { "arch": "x86_64", "epoch": null, "name": "libdnf", "release": "19.el8", "source": "rpm", "version": "0.63.0" } ], "libedit": [ { "arch": "x86_64", "epoch": null, "name": "libedit", "release": "23.20170329cvs.el8", "source": "rpm", "version": "3.1" } ], "libestr": [ { "arch": "x86_64", "epoch": null, "name": "libestr", "release": "3.el8", "source": "rpm", "version": "0.1.10" } ], "libev": [ { "arch": "x86_64", "epoch": null, "name": "libev", "release": "6.el8", "source": "rpm", "version": "4.24" } ], "libevent": [ { "arch": "x86_64", "epoch": null, "name": "libevent", "release": "5.el8", "source": "rpm", "version": "2.1.8" } ], "libfastjson": [ { "arch": "x86_64", "epoch": null, "name": "libfastjson", "release": "2.el8", "source": "rpm", "version": "0.99.9" } ], "libfdisk": [ { "arch": "x86_64", "epoch": null, "name": "libfdisk", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "libffi": [ { "arch": "x86_64", "epoch": null, "name": "libffi", "release": "24.el8", "source": "rpm", "version": "3.1" } ], "libgcc": [ { "arch": "x86_64", "epoch": null, "name": "libgcc", "release": "22.el8", "source": "rpm", "version": "8.5.0" } ], "libgcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libgcrypt", "release": "7.el8", "source": "rpm", "version": "1.8.5" } ], "libgomp": [ { "arch": "x86_64", "epoch": null, "name": "libgomp", "release": "22.el8", "source": "rpm", "version": "8.5.0" } ], "libgpg-error": [ { "arch": "x86_64", "epoch": null, "name": "libgpg-error", "release": "1.el8", "source": "rpm", "version": "1.31" } ], "libibverbs": [ { "arch": "x86_64", "epoch": null, "name": "libibverbs", "release": "1.el8", "source": "rpm", "version": "48.0" } ], "libidn2": [ { "arch": "x86_64", "epoch": null, "name": "libidn2", "release": "1.el8", "source": "rpm", "version": "2.2.0" } ], "libini_config": [ { "arch": "x86_64", "epoch": null, "name": "libini_config", "release": "40.el8", "source": "rpm", "version": "1.3.1" } ], "libkadm5": [ { "arch": "x86_64", "epoch": null, "name": "libkadm5", "release": "27.el8", "source": "rpm", "version": "1.18.2" } ], 
"libkcapi": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi", "release": "2.el8", "source": "rpm", "version": "1.4.0" } ], "libkcapi-hmaccalc": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi-hmaccalc", "release": "2.el8", "source": "rpm", "version": "1.4.0" } ], "libksba": [ { "arch": "x86_64", "epoch": null, "name": "libksba", "release": "9.el8", "source": "rpm", "version": "1.3.5" } ], "libldb": [ { "arch": "x86_64", "epoch": null, "name": "libldb", "release": "0.el8", "source": "rpm", "version": "2.8.0" } ], "libmaxminddb": [ { "arch": "x86_64", "epoch": null, "name": "libmaxminddb", "release": "10.el8.1", "source": "rpm", "version": "1.2.0" } ], "libmetalink": [ { "arch": "x86_64", "epoch": null, "name": "libmetalink", "release": "7.el8", "source": "rpm", "version": "0.1.3" } ], "libmnl": [ { "arch": "x86_64", "epoch": null, "name": "libmnl", "release": "6.el8", "source": "rpm", "version": "1.0.4" } ], "libmodulemd": [ { "arch": "x86_64", "epoch": null, "name": "libmodulemd", "release": "1.el8", "source": "rpm", "version": "2.13.0" } ], "libmount": [ { "arch": "x86_64", "epoch": null, "name": "libmount", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "libmpc": [ { "arch": "x86_64", "epoch": null, "name": "libmpc", "release": "9.1.el8", "source": "rpm", "version": "1.1.0" } ], "libndp": [ { "arch": "x86_64", "epoch": null, "name": "libndp", "release": "6.el8", "source": "rpm", "version": "1.7" } ], "libnetfilter_conntrack": [ { "arch": "x86_64", "epoch": null, "name": "libnetfilter_conntrack", "release": "5.el8", "source": "rpm", "version": "1.0.6" } ], "libnfnetlink": [ { "arch": "x86_64", "epoch": null, "name": "libnfnetlink", "release": "13.el8", "source": "rpm", "version": "1.0.1" } ], "libnfsidmap": [ { "arch": "x86_64", "epoch": 1, "name": "libnfsidmap", "release": "59.el8", "source": "rpm", "version": "2.3.3" } ], "libnftnl": [ { "arch": "x86_64", "epoch": null, "name": "libnftnl", "release": "3.el8", "source": "rpm", "version": "1.2.2" } ], "libnghttp2": [ { "arch": "x86_64", "epoch": null, "name": "libnghttp2", "release": "6.el8.1", "source": "rpm", "version": "1.33.0" } ], "libnl3": [ { "arch": "x86_64", "epoch": null, "name": "libnl3", "release": "1.el8", "source": "rpm", "version": "3.7.0" } ], "libnl3-cli": [ { "arch": "x86_64", "epoch": null, "name": "libnl3-cli", "release": "1.el8", "source": "rpm", "version": "3.7.0" } ], "libnsl2": [ { "arch": "x86_64", "epoch": null, "name": "libnsl2", "release": "2.20180605git4a062cf.el8", "source": "rpm", "version": "1.2.0" } ], "libomp": [ { "arch": "x86_64", "epoch": null, "name": "libomp", "release": "1.module_el8+767+9fa966b8", "source": "rpm", "version": "17.0.6" } ], "libomp-devel": [ { "arch": "x86_64", "epoch": null, "name": "libomp-devel", "release": "1.module_el8+767+9fa966b8", "source": "rpm", "version": "17.0.6" } ], "libpath_utils": [ { "arch": "x86_64", "epoch": null, "name": "libpath_utils", "release": "40.el8", "source": "rpm", "version": "0.2.1" } ], "libpcap": [ { "arch": "x86_64", "epoch": 14, "name": "libpcap", "release": "5.el8", "source": "rpm", "version": "1.9.1" } ], "libpipeline": [ { "arch": "x86_64", "epoch": null, "name": "libpipeline", "release": "2.el8", "source": "rpm", "version": "1.5.0" } ], "libpkgconf": [ { "arch": "x86_64", "epoch": null, "name": "libpkgconf", "release": "1.el8", "source": "rpm", "version": "1.4.2" } ], "libpng": [ { "arch": "x86_64", "epoch": 2, "name": "libpng", "release": "5.el8", "source": "rpm", "version": "1.6.34" } ], "libpsl": [ { "arch": 
"x86_64", "epoch": null, "name": "libpsl", "release": "6.el8", "source": "rpm", "version": "0.20.2" } ], "libpwquality": [ { "arch": "x86_64", "epoch": null, "name": "libpwquality", "release": "6.el8", "source": "rpm", "version": "1.4.4" } ], "libref_array": [ { "arch": "x86_64", "epoch": null, "name": "libref_array", "release": "40.el8", "source": "rpm", "version": "0.1.5" } ], "librepo": [ { "arch": "x86_64", "epoch": null, "name": "librepo", "release": "5.el8", "source": "rpm", "version": "1.14.2" } ], "libreport-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "libreport-filesystem", "release": "15.el8", "source": "rpm", "version": "2.9.5" } ], "libseccomp": [ { "arch": "x86_64", "epoch": null, "name": "libseccomp", "release": "1.el8", "source": "rpm", "version": "2.5.2" } ], "libsecret": [ { "arch": "x86_64", "epoch": null, "name": "libsecret", "release": "1.el8", "source": "rpm", "version": "0.18.6" } ], "libselinux": [ { "arch": "x86_64", "epoch": null, "name": "libselinux", "release": "8.el8", "source": "rpm", "version": "2.9" } ], "libselinux-devel": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-devel", "release": "8.el8", "source": "rpm", "version": "2.9" } ], "libselinux-utils": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-utils", "release": "8.el8", "source": "rpm", "version": "2.9" } ], "libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "libsemanage", "release": "9.el8", "source": "rpm", "version": "2.9" } ], "libsepol": [ { "arch": "x86_64", "epoch": null, "name": "libsepol", "release": "3.el8", "source": "rpm", "version": "2.9" } ], "libsepol-devel": [ { "arch": "x86_64", "epoch": null, "name": "libsepol-devel", "release": "3.el8", "source": "rpm", "version": "2.9" } ], "libsigsegv": [ { "arch": "x86_64", "epoch": null, "name": "libsigsegv", "release": "5.el8", "source": "rpm", "version": "2.11" } ], "libsmartcols": [ { "arch": "x86_64", "epoch": null, "name": "libsmartcols", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "libsolv": [ { "arch": "x86_64", "epoch": null, "name": "libsolv", "release": "6.el8", "source": "rpm", "version": "0.7.20" } ], "libss": [ { "arch": "x86_64", "epoch": null, "name": "libss", "release": "5.el8", "source": "rpm", "version": "1.45.6" } ], "libssh": [ { "arch": "x86_64", "epoch": null, "name": "libssh", "release": "14.el8", "source": "rpm", "version": "0.9.6" } ], "libssh-config": [ { "arch": "noarch", "epoch": null, "name": "libssh-config", "release": "14.el8", "source": "rpm", "version": "0.9.6" } ], "libsss_autofs": [ { "arch": "x86_64", "epoch": null, "name": "libsss_autofs", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "libsss_certmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_certmap", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "libsss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_idmap", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "libsss_nss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_nss_idmap", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "libsss_sudo": [ { "arch": "x86_64", "epoch": null, "name": "libsss_sudo", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "libstdc++": [ { "arch": "x86_64", "epoch": null, "name": "libstdc++", "release": "22.el8", "source": "rpm", "version": "8.5.0" } ], "libsysfs": [ { "arch": "x86_64", "epoch": null, "name": "libsysfs", "release": "25.el8", "source": "rpm", "version": "2.1.0" } ], "libtalloc": [ { "arch": "x86_64", 
"epoch": null, "name": "libtalloc", "release": "0.el8", "source": "rpm", "version": "2.4.1" } ], "libtasn1": [ { "arch": "x86_64", "epoch": null, "name": "libtasn1", "release": "4.el8", "source": "rpm", "version": "4.13" } ], "libtdb": [ { "arch": "x86_64", "epoch": null, "name": "libtdb", "release": "0.el8", "source": "rpm", "version": "1.4.9" } ], "libteam": [ { "arch": "x86_64", "epoch": null, "name": "libteam", "release": "4.el8", "source": "rpm", "version": "1.31" } ], "libtevent": [ { "arch": "x86_64", "epoch": null, "name": "libtevent", "release": "0.el8", "source": "rpm", "version": "0.16.0" } ], "libtirpc": [ { "arch": "x86_64", "epoch": null, "name": "libtirpc", "release": "12.el8", "source": "rpm", "version": "1.1.4" } ], "libunistring": [ { "arch": "x86_64", "epoch": null, "name": "libunistring", "release": "3.el8", "source": "rpm", "version": "0.9.9" } ], "libusbx": [ { "arch": "x86_64", "epoch": null, "name": "libusbx", "release": "4.el8", "source": "rpm", "version": "1.0.23" } ], "libuser": [ { "arch": "x86_64", "epoch": null, "name": "libuser", "release": "25.el8", "source": "rpm", "version": "0.62" } ], "libutempter": [ { "arch": "x86_64", "epoch": null, "name": "libutempter", "release": "14.el8", "source": "rpm", "version": "1.1.6" } ], "libuuid": [ { "arch": "x86_64", "epoch": null, "name": "libuuid", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "libuv": [ { "arch": "x86_64", "epoch": 1, "name": "libuv", "release": "1.el8_4", "source": "rpm", "version": "1.41.1" } ], "libverto": [ { "arch": "x86_64", "epoch": null, "name": "libverto", "release": "2.el8", "source": "rpm", "version": "0.3.2" } ], "libverto-devel": [ { "arch": "x86_64", "epoch": null, "name": "libverto-devel", "release": "2.el8", "source": "rpm", "version": "0.3.2" } ], "libverto-libev": [ { "arch": "x86_64", "epoch": null, "name": "libverto-libev", "release": "2.el8", "source": "rpm", "version": "0.3.2" } ], "libxcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt", "release": "6.el8", "source": "rpm", "version": "4.1.1" } ], "libxcrypt-devel": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt-devel", "release": "6.el8", "source": "rpm", "version": "4.1.1" } ], "libxkbcommon": [ { "arch": "x86_64", "epoch": null, "name": "libxkbcommon", "release": "1.el8", "source": "rpm", "version": "0.9.1" } ], "libxml2": [ { "arch": "x86_64", "epoch": null, "name": "libxml2", "release": "18.el8", "source": "rpm", "version": "2.9.7" } ], "libxslt": [ { "arch": "x86_64", "epoch": null, "name": "libxslt", "release": "6.el8", "source": "rpm", "version": "1.1.32" } ], "libyaml": [ { "arch": "x86_64", "epoch": null, "name": "libyaml", "release": "5.el8", "source": "rpm", "version": "0.1.7" } ], "libzstd": [ { "arch": "x86_64", "epoch": null, "name": "libzstd", "release": "1.el8", "source": "rpm", "version": "1.4.4" } ], "libzstd-devel": [ { "arch": "x86_64", "epoch": null, "name": "libzstd-devel", "release": "1.el8", "source": "rpm", "version": "1.4.4" } ], "linux-firmware": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware", "release": "121.gitb3132c18.el8", "source": "rpm", "version": "20240111" } ], "llvm-libs": [ { "arch": "x86_64", "epoch": null, "name": "llvm-libs", "release": "2.module_el8+895+5524c78c", "source": "rpm", "version": "17.0.6" } ], "lmdb-libs": [ { "arch": "x86_64", "epoch": null, "name": "lmdb-libs", "release": "2.el8", "source": "rpm", "version": "0.9.24" } ], "logrotate": [ { "arch": "x86_64", "epoch": null, "name": "logrotate", "release": "6.el8", 
"source": "rpm", "version": "3.14.0" } ], "lshw": [ { "arch": "x86_64", "epoch": null, "name": "lshw", "release": "6.el8", "source": "rpm", "version": "B.02.19.2" } ], "lsscsi": [ { "arch": "x86_64", "epoch": null, "name": "lsscsi", "release": "3.el8", "source": "rpm", "version": "0.32" } ], "lua-libs": [ { "arch": "x86_64", "epoch": null, "name": "lua-libs", "release": "12.el8", "source": "rpm", "version": "5.3.4" } ], "lz4-libs": [ { "arch": "x86_64", "epoch": null, "name": "lz4-libs", "release": "3.el8_4", "source": "rpm", "version": "1.8.3" } ], "lzo": [ { "arch": "x86_64", "epoch": null, "name": "lzo", "release": "14.el8", "source": "rpm", "version": "2.08" } ], "m4": [ { "arch": "x86_64", "epoch": null, "name": "m4", "release": "7.el8", "source": "rpm", "version": "1.4.18" } ], "make": [ { "arch": "x86_64", "epoch": 1, "name": "make", "release": "11.el8", "source": "rpm", "version": "4.2.1" } ], "man-db": [ { "arch": "x86_64", "epoch": null, "name": "man-db", "release": "18.el8", "source": "rpm", "version": "2.7.6.1" } ], "memstrack": [ { "arch": "x86_64", "epoch": null, "name": "memstrack", "release": "2.el8", "source": "rpm", "version": "0.2.5" } ], "microcode_ctl": [ { "arch": "x86_64", "epoch": 4, "name": "microcode_ctl", "release": "2.el8", "source": "rpm", "version": "20230808" } ], "mokutil": [ { "arch": "x86_64", "epoch": 1, "name": "mokutil", "release": "12.el8", "source": "rpm", "version": "0.3.0" } ], "mozjs60": [ { "arch": "x86_64", "epoch": null, "name": "mozjs60", "release": "4.el8", "source": "rpm", "version": "60.9.0" } ], "mpfr": [ { "arch": "x86_64", "epoch": null, "name": "mpfr", "release": "1.el8", "source": "rpm", "version": "3.1.6" } ], "ncurses": [ { "arch": "x86_64", "epoch": null, "name": "ncurses", "release": "10.20180224.el8", "source": "rpm", "version": "6.1" } ], "ncurses-base": [ { "arch": "noarch", "epoch": null, "name": "ncurses-base", "release": "10.20180224.el8", "source": "rpm", "version": "6.1" } ], "ncurses-libs": [ { "arch": "x86_64", "epoch": null, "name": "ncurses-libs", "release": "10.20180224.el8", "source": "rpm", "version": "6.1" } ], "nettle": [ { "arch": "x86_64", "epoch": null, "name": "nettle", "release": "7.el8", "source": "rpm", "version": "3.4.1" } ], "newt": [ { "arch": "x86_64", "epoch": null, "name": "newt", "release": "11.el8", "source": "rpm", "version": "0.52.20" } ], "nfs-utils": [ { "arch": "x86_64", "epoch": 1, "name": "nfs-utils", "release": "59.el8", "source": "rpm", "version": "2.3.3" } ], "nftables": [ { "arch": "x86_64", "epoch": 1, "name": "nftables", "release": "4.el8", "source": "rpm", "version": "1.0.4" } ], "npth": [ { "arch": "x86_64", "epoch": null, "name": "npth", "release": "4.el8", "source": "rpm", "version": "1.5" } ], "nspr": [ { "arch": "x86_64", "epoch": null, "name": "nspr", "release": "1.el8", "source": "rpm", "version": "4.35.0" } ], "nss": [ { "arch": "x86_64", "epoch": null, "name": "nss", "release": "7.el8", "source": "rpm", "version": "3.90.0" } ], "nss-softokn": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn", "release": "7.el8", "source": "rpm", "version": "3.90.0" } ], "nss-softokn-freebl": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn-freebl", "release": "7.el8", "source": "rpm", "version": "3.90.0" } ], "nss-sysinit": [ { "arch": "x86_64", "epoch": null, "name": "nss-sysinit", "release": "7.el8", "source": "rpm", "version": "3.90.0" } ], "nss-util": [ { "arch": "x86_64", "epoch": null, "name": "nss-util", "release": "7.el8", "source": "rpm", "version": "3.90.0" } ], 
"numactl-libs": [ { "arch": "x86_64", "epoch": null, "name": "numactl-libs", "release": "4.el8", "source": "rpm", "version": "2.0.16" } ], "oddjob": [ { "arch": "x86_64", "epoch": null, "name": "oddjob", "release": "3.el8", "source": "rpm", "version": "0.34.7" } ], "oddjob-mkhomedir": [ { "arch": "x86_64", "epoch": null, "name": "oddjob-mkhomedir", "release": "3.el8", "source": "rpm", "version": "0.34.7" } ], "openldap": [ { "arch": "x86_64", "epoch": null, "name": "openldap", "release": "18.el8", "source": "rpm", "version": "2.4.46" } ], "openssh": [ { "arch": "x86_64", "epoch": null, "name": "openssh", "release": "24.el8", "source": "rpm", "version": "8.0p1" } ], "openssh-clients": [ { "arch": "x86_64", "epoch": null, "name": "openssh-clients", "release": "24.el8", "source": "rpm", "version": "8.0p1" } ], "openssh-server": [ { "arch": "x86_64", "epoch": null, "name": "openssh-server", "release": "24.el8", "source": "rpm", "version": "8.0p1" } ], "openssl": [ { "arch": "x86_64", "epoch": 1, "name": "openssl", "release": "12.el8", "source": "rpm", "version": "1.1.1k" } ], "openssl-devel": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-devel", "release": "12.el8", "source": "rpm", "version": "1.1.1k" } ], "openssl-libs": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-libs", "release": "12.el8", "source": "rpm", "version": "1.1.1k" } ], "openssl-pkcs11": [ { "arch": "x86_64", "epoch": null, "name": "openssl-pkcs11", "release": "3.el8", "source": "rpm", "version": "0.4.10" } ], "os-prober": [ { "arch": "x86_64", "epoch": null, "name": "os-prober", "release": "9.el8", "source": "rpm", "version": "1.74" } ], "p11-kit": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit", "release": "2.el8", "source": "rpm", "version": "0.23.22" } ], "p11-kit-trust": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit-trust", "release": "2.el8", "source": "rpm", "version": "0.23.22" } ], "pam": [ { "arch": "x86_64", "epoch": null, "name": "pam", "release": "33.el8", "source": "rpm", "version": "1.3.1" } ], "parted": [ { "arch": "x86_64", "epoch": null, "name": "parted", "release": "39.el8", "source": "rpm", "version": "3.2" } ], "passwd": [ { "arch": "x86_64", "epoch": null, "name": "passwd", "release": "4.el8", "source": "rpm", "version": "0.80" } ], "pciutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "pciutils-libs", "release": "3.el8", "source": "rpm", "version": "3.7.0" } ], "pcp": [ { "arch": "x86_64", "epoch": null, "name": "pcp", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-conf": [ { "arch": "x86_64", "epoch": null, "name": "pcp-conf", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-doc": [ { "arch": "noarch", "epoch": null, "name": "pcp-doc", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-libs": [ { "arch": "x86_64", "epoch": null, "name": "pcp-libs", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-pmda-bpftrace": [ { "arch": "x86_64", "epoch": null, "name": "pcp-pmda-bpftrace", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-pmda-dm": [ { "arch": "x86_64", "epoch": null, "name": "pcp-pmda-dm", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-pmda-nfsclient": [ { "arch": "x86_64", "epoch": null, "name": "pcp-pmda-nfsclient", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-pmda-openmetrics": [ { "arch": "x86_64", "epoch": null, "name": "pcp-pmda-openmetrics", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-selinux": [ { "arch": 
"x86_64", "epoch": null, "name": "pcp-selinux", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-system-tools": [ { "arch": "x86_64", "epoch": null, "name": "pcp-system-tools", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcp-zeroconf": [ { "arch": "x86_64", "epoch": null, "name": "pcp-zeroconf", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "pcre": [ { "arch": "x86_64", "epoch": null, "name": "pcre", "release": "6.el8", "source": "rpm", "version": "8.42" } ], "pcre2": [ { "arch": "x86_64", "epoch": null, "name": "pcre2", "release": "3.el8", "source": "rpm", "version": "10.32" } ], "pcre2-devel": [ { "arch": "x86_64", "epoch": null, "name": "pcre2-devel", "release": "3.el8", "source": "rpm", "version": "10.32" } ], "pcre2-utf16": [ { "arch": "x86_64", "epoch": null, "name": "pcre2-utf16", "release": "3.el8", "source": "rpm", "version": "10.32" } ], "pcre2-utf32": [ { "arch": "x86_64", "epoch": null, "name": "pcre2-utf32", "release": "3.el8", "source": "rpm", "version": "10.32" } ], "perl-Carp": [ { "arch": "noarch", "epoch": null, "name": "perl-Carp", "release": "396.el8", "source": "rpm", "version": "1.42" } ], "perl-Data-Dumper": [ { "arch": "x86_64", "epoch": null, "name": "perl-Data-Dumper", "release": "399.el8", "source": "rpm", "version": "2.167" } ], "perl-Digest": [ { "arch": "noarch", "epoch": null, "name": "perl-Digest", "release": "395.el8", "source": "rpm", "version": "1.17" } ], "perl-Digest-MD5": [ { "arch": "x86_64", "epoch": null, "name": "perl-Digest-MD5", "release": "396.el8", "source": "rpm", "version": "2.55" } ], "perl-Encode": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Encode", "release": "3.el8", "source": "rpm", "version": "2.97" } ], "perl-Errno": [ { "arch": "x86_64", "epoch": 0, "name": "perl-Errno", "release": "422.el8", "source": "rpm", "version": "1.28" } ], "perl-Error": [ { "arch": "noarch", "epoch": 1, "name": "perl-Error", "release": "2.el8", "source": "rpm", "version": "0.17025" } ], "perl-Exporter": [ { "arch": "noarch", "epoch": null, "name": "perl-Exporter", "release": "396.el8", "source": "rpm", "version": "5.72" } ], "perl-File-Path": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Path", "release": "2.el8", "source": "rpm", "version": "2.15" } ], "perl-File-Temp": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Temp", "release": "1.el8", "source": "rpm", "version": "0.230.600" } ], "perl-Getopt-Long": [ { "arch": "noarch", "epoch": 1, "name": "perl-Getopt-Long", "release": "4.el8", "source": "rpm", "version": "2.50" } ], "perl-Git": [ { "arch": "noarch", "epoch": null, "name": "perl-Git", "release": "1.el8", "source": "rpm", "version": "2.43.0" } ], "perl-HTTP-Tiny": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Tiny", "release": "3.el8", "source": "rpm", "version": "0.074" } ], "perl-IO": [ { "arch": "x86_64", "epoch": 0, "name": "perl-IO", "release": "422.el8", "source": "rpm", "version": "1.38" } ], "perl-IO-Socket-IP": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-IP", "release": "5.el8", "source": "rpm", "version": "0.39" } ], "perl-IO-Socket-SSL": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-SSL", "release": "4.module_el8+339+1ec643e0", "source": "rpm", "version": "2.066" } ], "perl-MIME-Base64": [ { "arch": "x86_64", "epoch": null, "name": "perl-MIME-Base64", "release": "396.el8", "source": "rpm", "version": "3.15" } ], "perl-Mozilla-CA": [ { "arch": "noarch", "epoch": null, "name": "perl-Mozilla-CA", "release": 
"7.module_el8+645+9d809f8c", "source": "rpm", "version": "20160104" } ], "perl-Net-SSLeay": [ { "arch": "x86_64", "epoch": null, "name": "perl-Net-SSLeay", "release": "2.module_el8+339+1ec643e0", "source": "rpm", "version": "1.88" } ], "perl-PathTools": [ { "arch": "x86_64", "epoch": null, "name": "perl-PathTools", "release": "1.el8", "source": "rpm", "version": "3.74" } ], "perl-Pod-Escapes": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Escapes", "release": "395.el8", "source": "rpm", "version": "1.07" } ], "perl-Pod-Perldoc": [ { "arch": "noarch", "epoch": null, "name": "perl-Pod-Perldoc", "release": "396.el8", "source": "rpm", "version": "3.28" } ], "perl-Pod-Simple": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Simple", "release": "395.el8", "source": "rpm", "version": "3.35" } ], "perl-Pod-Usage": [ { "arch": "noarch", "epoch": 4, "name": "perl-Pod-Usage", "release": "395.el8", "source": "rpm", "version": "1.69" } ], "perl-Scalar-List-Utils": [ { "arch": "x86_64", "epoch": 3, "name": "perl-Scalar-List-Utils", "release": "2.el8", "source": "rpm", "version": "1.49" } ], "perl-Socket": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Socket", "release": "3.el8", "source": "rpm", "version": "2.027" } ], "perl-Storable": [ { "arch": "x86_64", "epoch": 1, "name": "perl-Storable", "release": "3.el8", "source": "rpm", "version": "3.11" } ], "perl-Term-ANSIColor": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-ANSIColor", "release": "396.el8", "source": "rpm", "version": "4.06" } ], "perl-Term-Cap": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-Cap", "release": "395.el8", "source": "rpm", "version": "1.17" } ], "perl-TermReadKey": [ { "arch": "x86_64", "epoch": null, "name": "perl-TermReadKey", "release": "7.el8", "source": "rpm", "version": "2.37" } ], "perl-Text-ParseWords": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-ParseWords", "release": "395.el8", "source": "rpm", "version": "3.30" } ], "perl-Text-Tabs+Wrap": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-Tabs+Wrap", "release": "395.el8", "source": "rpm", "version": "2013.0523" } ], "perl-Time-Local": [ { "arch": "noarch", "epoch": 1, "name": "perl-Time-Local", "release": "1.el8", "source": "rpm", "version": "1.280" } ], "perl-URI": [ { "arch": "noarch", "epoch": null, "name": "perl-URI", "release": "3.el8", "source": "rpm", "version": "1.73" } ], "perl-Unicode-Normalize": [ { "arch": "x86_64", "epoch": null, "name": "perl-Unicode-Normalize", "release": "396.el8", "source": "rpm", "version": "1.25" } ], "perl-constant": [ { "arch": "noarch", "epoch": null, "name": "perl-constant", "release": "396.el8", "source": "rpm", "version": "1.33" } ], "perl-interpreter": [ { "arch": "x86_64", "epoch": 4, "name": "perl-interpreter", "release": "422.el8", "source": "rpm", "version": "5.26.3" } ], "perl-libnet": [ { "arch": "noarch", "epoch": null, "name": "perl-libnet", "release": "3.el8", "source": "rpm", "version": "3.11" } ], "perl-libs": [ { "arch": "x86_64", "epoch": 4, "name": "perl-libs", "release": "422.el8", "source": "rpm", "version": "5.26.3" } ], "perl-macros": [ { "arch": "x86_64", "epoch": 4, "name": "perl-macros", "release": "422.el8", "source": "rpm", "version": "5.26.3" } ], "perl-parent": [ { "arch": "noarch", "epoch": 1, "name": "perl-parent", "release": "1.el8", "source": "rpm", "version": "0.237" } ], "perl-podlators": [ { "arch": "noarch", "epoch": null, "name": "perl-podlators", "release": "1.el8", "source": "rpm", "version": "4.11" } ], "perl-threads": [ { "arch": 
"x86_64", "epoch": 1, "name": "perl-threads", "release": "2.el8", "source": "rpm", "version": "2.21" } ], "perl-threads-shared": [ { "arch": "x86_64", "epoch": null, "name": "perl-threads-shared", "release": "2.el8", "source": "rpm", "version": "1.58" } ], "pigz": [ { "arch": "x86_64", "epoch": null, "name": "pigz", "release": "4.el8", "source": "rpm", "version": "2.4" } ], "pinentry": [ { "arch": "x86_64", "epoch": null, "name": "pinentry", "release": "2.el8", "source": "rpm", "version": "1.1.0" } ], "pkgconf": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf", "release": "1.el8", "source": "rpm", "version": "1.4.2" } ], "pkgconf-m4": [ { "arch": "noarch", "epoch": null, "name": "pkgconf-m4", "release": "1.el8", "source": "rpm", "version": "1.4.2" } ], "pkgconf-pkg-config": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf-pkg-config", "release": "1.el8", "source": "rpm", "version": "1.4.2" } ], "platform-python": [ { "arch": "x86_64", "epoch": null, "name": "platform-python", "release": "62.el8", "source": "rpm", "version": "3.6.8" } ], "platform-python-pip": [ { "arch": "noarch", "epoch": null, "name": "platform-python-pip", "release": "24.el8", "source": "rpm", "version": "9.0.3" } ], "platform-python-setuptools": [ { "arch": "noarch", "epoch": null, "name": "platform-python-setuptools", "release": "7.el8", "source": "rpm", "version": "39.2.0" } ], "plymouth": [ { "arch": "x86_64", "epoch": null, "name": "plymouth", "release": "11.20200615git1e36e30.el8", "source": "rpm", "version": "0.9.4" } ], "plymouth-core-libs": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-core-libs", "release": "11.20200615git1e36e30.el8", "source": "rpm", "version": "0.9.4" } ], "plymouth-scripts": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-scripts", "release": "11.20200615git1e36e30.el8", "source": "rpm", "version": "0.9.4" } ], "policycoreutils": [ { "arch": "x86_64", "epoch": null, "name": "policycoreutils", "release": "26.el8", "source": "rpm", "version": "2.9" } ], "policycoreutils-python-utils": [ { "arch": "noarch", "epoch": null, "name": "policycoreutils-python-utils", "release": "26.el8", "source": "rpm", "version": "2.9" } ], "polkit": [ { "arch": "x86_64", "epoch": null, "name": "polkit", "release": "15.el8", "source": "rpm", "version": "0.115" } ], "polkit-libs": [ { "arch": "x86_64", "epoch": null, "name": "polkit-libs", "release": "15.el8", "source": "rpm", "version": "0.115" } ], "polkit-pkla-compat": [ { "arch": "x86_64", "epoch": null, "name": "polkit-pkla-compat", "release": "12.el8", "source": "rpm", "version": "0.1" } ], "popt": [ { "arch": "x86_64", "epoch": null, "name": "popt", "release": "1.el8", "source": "rpm", "version": "1.18" } ], "prefixdevname": [ { "arch": "x86_64", "epoch": null, "name": "prefixdevname", "release": "6.el8", "source": "rpm", "version": "0.1.0" } ], "procps-ng": [ { "arch": "x86_64", "epoch": null, "name": "procps-ng", "release": "14.el8", "source": "rpm", "version": "3.3.15" } ], "psmisc": [ { "arch": "x86_64", "epoch": null, "name": "psmisc", "release": "5.el8", "source": "rpm", "version": "23.1" } ], "publicsuffix-list-dafsa": [ { "arch": "noarch", "epoch": null, "name": "publicsuffix-list-dafsa", "release": "1.el8", "source": "rpm", "version": "20180723" } ], "python3-audit": [ { "arch": "x86_64", "epoch": null, "name": "python3-audit", "release": "1.el8", "source": "rpm", "version": "3.1.2" } ], "python3-babel": [ { "arch": "noarch", "epoch": null, "name": "python3-babel", "release": "7.el8", "source": "rpm", "version": "2.5.1" } 
], "python3-bcc": [ { "arch": "x86_64", "epoch": null, "name": "python3-bcc", "release": "7.el8", "source": "rpm", "version": "0.25.0" } ], "python3-cffi": [ { "arch": "x86_64", "epoch": null, "name": "python3-cffi", "release": "6.el8", "source": "rpm", "version": "1.11.5" } ], "python3-chardet": [ { "arch": "noarch", "epoch": null, "name": "python3-chardet", "release": "7.el8", "source": "rpm", "version": "3.0.4" } ], "python3-configobj": [ { "arch": "noarch", "epoch": null, "name": "python3-configobj", "release": "11.el8", "source": "rpm", "version": "5.0.6" } ], "python3-cryptography": [ { "arch": "x86_64", "epoch": null, "name": "python3-cryptography", "release": "7.el8", "source": "rpm", "version": "3.2.1" } ], "python3-dateutil": [ { "arch": "noarch", "epoch": 1, "name": "python3-dateutil", "release": "6.el8", "source": "rpm", "version": "2.6.1" } ], "python3-dbus": [ { "arch": "x86_64", "epoch": null, "name": "python3-dbus", "release": "15.el8", "source": "rpm", "version": "1.2.4" } ], "python3-decorator": [ { "arch": "noarch", "epoch": null, "name": "python3-decorator", "release": "2.el8", "source": "rpm", "version": "4.2.1" } ], "python3-dnf": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf", "release": "20.el8", "source": "rpm", "version": "4.7.0" } ], "python3-dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf-plugins-core", "release": "25.el8", "source": "rpm", "version": "4.0.21" } ], "python3-firewall": [ { "arch": "noarch", "epoch": null, "name": "python3-firewall", "release": "4.el8", "source": "rpm", "version": "0.9.11" } ], "python3-gobject-base": [ { "arch": "x86_64", "epoch": null, "name": "python3-gobject-base", "release": "2.el8", "source": "rpm", "version": "3.28.3" } ], "python3-gpg": [ { "arch": "x86_64", "epoch": null, "name": "python3-gpg", "release": "12.el8", "source": "rpm", "version": "1.13.1" } ], "python3-hawkey": [ { "arch": "x86_64", "epoch": null, "name": "python3-hawkey", "release": "19.el8", "source": "rpm", "version": "0.63.0" } ], "python3-html5lib": [ { "arch": "noarch", "epoch": 1, "name": "python3-html5lib", "release": "6.el8", "source": "rpm", "version": "0.999999999" } ], "python3-idna": [ { "arch": "noarch", "epoch": null, "name": "python3-idna", "release": "7.el8", "source": "rpm", "version": "2.5" } ], "python3-jinja2": [ { "arch": "noarch", "epoch": null, "name": "python3-jinja2", "release": "5.el8", "source": "rpm", "version": "2.10.1" } ], "python3-jsonpatch": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpatch", "release": "2.el8", "source": "rpm", "version": "1.21" } ], "python3-jsonpointer": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpointer", "release": "11.el8", "source": "rpm", "version": "1.10" } ], "python3-jsonschema": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonschema", "release": "4.el8", "source": "rpm", "version": "2.6.0" } ], "python3-jwt": [ { "arch": "noarch", "epoch": null, "name": "python3-jwt", "release": "2.el8", "source": "rpm", "version": "1.6.1" } ], "python3-libcomps": [ { "arch": "x86_64", "epoch": null, "name": "python3-libcomps", "release": "1.el8", "source": "rpm", "version": "0.1.18" } ], "python3-libdnf": [ { "arch": "x86_64", "epoch": null, "name": "python3-libdnf", "release": "19.el8", "source": "rpm", "version": "0.63.0" } ], "python3-libs": [ { "arch": "x86_64", "epoch": null, "name": "python3-libs", "release": "62.el8", "source": "rpm", "version": "3.6.8" } ], "python3-libselinux": [ { "arch": "x86_64", "epoch": null, "name": 
"python3-libselinux", "release": "8.el8", "source": "rpm", "version": "2.9" } ], "python3-libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "python3-libsemanage", "release": "9.el8", "source": "rpm", "version": "2.9" } ], "python3-linux-procfs": [ { "arch": "noarch", "epoch": null, "name": "python3-linux-procfs", "release": "1.el8", "source": "rpm", "version": "0.7.3" } ], "python3-lxml": [ { "arch": "x86_64", "epoch": null, "name": "python3-lxml", "release": "4.el8", "source": "rpm", "version": "4.2.3" } ], "python3-markupsafe": [ { "arch": "x86_64", "epoch": null, "name": "python3-markupsafe", "release": "19.el8", "source": "rpm", "version": "0.23" } ], "python3-netaddr": [ { "arch": "noarch", "epoch": null, "name": "python3-netaddr", "release": "8.el8", "source": "rpm", "version": "0.7.19" } ], "python3-netifaces": [ { "arch": "x86_64", "epoch": null, "name": "python3-netifaces", "release": "4.el8", "source": "rpm", "version": "0.10.6" } ], "python3-nftables": [ { "arch": "x86_64", "epoch": 1, "name": "python3-nftables", "release": "4.el8", "source": "rpm", "version": "1.0.4" } ], "python3-oauthlib": [ { "arch": "noarch", "epoch": null, "name": "python3-oauthlib", "release": "1.el8", "source": "rpm", "version": "2.1.0" } ], "python3-pcp": [ { "arch": "x86_64", "epoch": null, "name": "python3-pcp", "release": "20.el8", "source": "rpm", "version": "5.3.7" } ], "python3-perf": [ { "arch": "x86_64", "epoch": null, "name": "python3-perf", "release": "553.5.1.el8", "source": "rpm", "version": "4.18.0" } ], "python3-pip": [ { "arch": "noarch", "epoch": null, "name": "python3-pip", "release": "24.el8", "source": "rpm", "version": "9.0.3" } ], "python3-pip-wheel": [ { "arch": "noarch", "epoch": null, "name": "python3-pip-wheel", "release": "24.el8", "source": "rpm", "version": "9.0.3" } ], "python3-ply": [ { "arch": "noarch", "epoch": null, "name": "python3-ply", "release": "9.el8", "source": "rpm", "version": "3.9" } ], "python3-policycoreutils": [ { "arch": "noarch", "epoch": null, "name": "python3-policycoreutils", "release": "26.el8", "source": "rpm", "version": "2.9" } ], "python3-prettytable": [ { "arch": "noarch", "epoch": null, "name": "python3-prettytable", "release": "14.el8", "source": "rpm", "version": "0.7.2" } ], "python3-pycparser": [ { "arch": "noarch", "epoch": null, "name": "python3-pycparser", "release": "14.el8", "source": "rpm", "version": "2.14" } ], "python3-pyserial": [ { "arch": "noarch", "epoch": null, "name": "python3-pyserial", "release": "9.el8", "source": "rpm", "version": "3.1.1" } ], "python3-pysocks": [ { "arch": "noarch", "epoch": null, "name": "python3-pysocks", "release": "3.el8", "source": "rpm", "version": "1.6.8" } ], "python3-pytz": [ { "arch": "noarch", "epoch": null, "name": "python3-pytz", "release": "11.el8", "source": "rpm", "version": "2017.2" } ], "python3-pyudev": [ { "arch": "noarch", "epoch": null, "name": "python3-pyudev", "release": "7.el8", "source": "rpm", "version": "0.21.0" } ], "python3-pyyaml": [ { "arch": "x86_64", "epoch": null, "name": "python3-pyyaml", "release": "12.el8", "source": "rpm", "version": "3.12" } ], "python3-requests": [ { "arch": "noarch", "epoch": null, "name": "python3-requests", "release": "4.el8", "source": "rpm", "version": "2.20.0" } ], "python3-rpm": [ { "arch": "x86_64", "epoch": null, "name": "python3-rpm", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "python3-setools": [ { "arch": "x86_64", "epoch": null, "name": "python3-setools", "release": "5.el8", "source": "rpm", "version": 
"4.3.0" } ], "python3-setuptools": [ { "arch": "noarch", "epoch": null, "name": "python3-setuptools", "release": "7.el8", "source": "rpm", "version": "39.2.0" } ], "python3-setuptools-wheel": [ { "arch": "noarch", "epoch": null, "name": "python3-setuptools-wheel", "release": "7.el8", "source": "rpm", "version": "39.2.0" } ], "python3-six": [ { "arch": "noarch", "epoch": null, "name": "python3-six", "release": "8.el8", "source": "rpm", "version": "1.11.0" } ], "python3-slip": [ { "arch": "noarch", "epoch": null, "name": "python3-slip", "release": "13.el8", "source": "rpm", "version": "0.6.4" } ], "python3-slip-dbus": [ { "arch": "noarch", "epoch": null, "name": "python3-slip-dbus", "release": "13.el8", "source": "rpm", "version": "0.6.4" } ], "python3-syspurpose": [ { "arch": "x86_64", "epoch": null, "name": "python3-syspurpose", "release": "1.el8", "source": "rpm", "version": "1.28.42" } ], "python3-systemd": [ { "arch": "x86_64", "epoch": null, "name": "python3-systemd", "release": "8.el8", "source": "rpm", "version": "234" } ], "python3-unbound": [ { "arch": "x86_64", "epoch": null, "name": "python3-unbound", "release": "5.el8", "source": "rpm", "version": "1.16.2" } ], "python3-urllib3": [ { "arch": "noarch", "epoch": null, "name": "python3-urllib3", "release": "7.el8", "source": "rpm", "version": "1.24.2" } ], "python3-webencodings": [ { "arch": "noarch", "epoch": null, "name": "python3-webencodings", "release": "6.el8", "source": "rpm", "version": "0.5.1" } ], "python36": [ { "arch": "x86_64", "epoch": null, "name": "python36", "release": "39.module_el8+762+77bd8591", "source": "rpm", "version": "3.6.8" } ], "qa-tools": [ { "arch": "noarch", "epoch": null, "name": "qa-tools", "release": "4.el8", "source": "rpm", "version": "4.1" } ], "qemu-guest-agent": [ { "arch": "x86_64", "epoch": 15, "name": "qemu-guest-agent", "release": "49.module_el8+991+097e156d", "source": "rpm", "version": "6.2.0" } ], "quota": [ { "arch": "x86_64", "epoch": 1, "name": "quota", "release": "14.el8", "source": "rpm", "version": "4.04" } ], "quota-nls": [ { "arch": "noarch", "epoch": 1, "name": "quota-nls", "release": "14.el8", "source": "rpm", "version": "4.04" } ], "readline": [ { "arch": "x86_64", "epoch": null, "name": "readline", "release": "10.el8", "source": "rpm", "version": "7.0" } ], "redis": [ { "arch": "x86_64", "epoch": null, "name": "redis", "release": "5.module_el8.4.0+955+7126e393", "source": "rpm", "version": "5.0.3" } ], "restraint": [ { "arch": "x86_64", "epoch": null, "name": "restraint", "release": "1.el8bkr", "source": "rpm", "version": "0.4.4" } ], "restraint-rhts": [ { "arch": "x86_64", "epoch": null, "name": "restraint-rhts", "release": "1.el8bkr", "source": "rpm", "version": "0.4.4" } ], "rng-tools": [ { "arch": "x86_64", "epoch": null, "name": "rng-tools", "release": "1.el8", "source": "rpm", "version": "6.16" } ], "rootfiles": [ { "arch": "noarch", "epoch": null, "name": "rootfiles", "release": "22.el8", "source": "rpm", "version": "8.1" } ], "rpcbind": [ { "arch": "x86_64", "epoch": null, "name": "rpcbind", "release": "10.el8", "source": "rpm", "version": "1.2.5" } ], "rpm": [ { "arch": "x86_64", "epoch": null, "name": "rpm", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "rpm-build-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-build-libs", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "rpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-libs", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "rpm-plugin-selinux": [ 
{ "arch": "x86_64", "epoch": null, "name": "rpm-plugin-selinux", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "rpm-plugin-systemd-inhibit": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-systemd-inhibit", "release": "31.el8", "source": "rpm", "version": "4.14.3" } ], "rsync": [ { "arch": "x86_64", "epoch": null, "name": "rsync", "release": "19.el8.1", "source": "rpm", "version": "3.1.3" } ], "rsyslog": [ { "arch": "x86_64", "epoch": null, "name": "rsyslog", "release": "15.el8", "source": "rpm", "version": "8.2102.0" } ], "scl-utils": [ { "arch": "x86_64", "epoch": 1, "name": "scl-utils", "release": "16.el8", "source": "rpm", "version": "2.0.2" } ], "sed": [ { "arch": "x86_64", "epoch": null, "name": "sed", "release": "5.el8", "source": "rpm", "version": "4.5" } ], "selinux-policy": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy", "release": "139.el8", "source": "rpm", "version": "3.14.3" } ], "selinux-policy-targeted": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy-targeted", "release": "139.el8", "source": "rpm", "version": "3.14.3" } ], "setup": [ { "arch": "noarch", "epoch": null, "name": "setup", "release": "11.el8", "source": "rpm", "version": "2.12.2" } ], "sg3_utils": [ { "arch": "x86_64", "epoch": null, "name": "sg3_utils", "release": "6.el8", "source": "rpm", "version": "1.44" } ], "sg3_utils-libs": [ { "arch": "x86_64", "epoch": null, "name": "sg3_utils-libs", "release": "6.el8", "source": "rpm", "version": "1.44" } ], "shadow-utils": [ { "arch": "x86_64", "epoch": 2, "name": "shadow-utils", "release": "22.el8", "source": "rpm", "version": "4.6" } ], "shared-mime-info": [ { "arch": "x86_64", "epoch": null, "name": "shared-mime-info", "release": "4.el8", "source": "rpm", "version": "1.9" } ], "slang": [ { "arch": "x86_64", "epoch": null, "name": "slang", "release": "3.el8", "source": "rpm", "version": "2.3.2" } ], "snappy": [ { "arch": "x86_64", "epoch": null, "name": "snappy", "release": "3.el8", "source": "rpm", "version": "1.1.8" } ], "sqlite-libs": [ { "arch": "x86_64", "epoch": null, "name": "sqlite-libs", "release": "19.el8", "source": "rpm", "version": "3.26.0" } ], "squashfs-tools": [ { "arch": "x86_64", "epoch": null, "name": "squashfs-tools", "release": "21.el8", "source": "rpm", "version": "4.3" } ], "sssd-client": [ { "arch": "x86_64", "epoch": null, "name": "sssd-client", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "sssd-common": [ { "arch": "x86_64", "epoch": null, "name": "sssd-common", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "sssd-kcm": [ { "arch": "x86_64", "epoch": null, "name": "sssd-kcm", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "sssd-nfs-idmap": [ { "arch": "x86_64", "epoch": null, "name": "sssd-nfs-idmap", "release": "3.el8", "source": "rpm", "version": "2.9.4" } ], "strace": [ { "arch": "x86_64", "epoch": null, "name": "strace", "release": "2.el8", "source": "rpm", "version": "5.18" } ], "sudo": [ { "arch": "x86_64", "epoch": null, "name": "sudo", "release": "1.el8", "source": "rpm", "version": "1.9.5p2" } ], "systemd": [ { "arch": "x86_64", "epoch": null, "name": "systemd", "release": "82.el8.1", "source": "rpm", "version": "239" } ], "systemd-libs": [ { "arch": "x86_64", "epoch": null, "name": "systemd-libs", "release": "82.el8.1", "source": "rpm", "version": "239" } ], "systemd-pam": [ { "arch": "x86_64", "epoch": null, "name": "systemd-pam", "release": "82.el8.1", "source": "rpm", "version": "239" } ], "systemd-udev": [ { "arch": "x86_64", 
"epoch": null, "name": "systemd-udev", "release": "82.el8.1", "source": "rpm", "version": "239" } ], "systemtap": [ { "arch": "x86_64", "epoch": null, "name": "systemtap", "release": "3.el8", "source": "rpm", "version": "4.9" } ], "systemtap-client": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-client", "release": "3.el8", "source": "rpm", "version": "4.9" } ], "systemtap-devel": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-devel", "release": "3.el8", "source": "rpm", "version": "4.9" } ], "systemtap-runtime": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-runtime", "release": "3.el8", "source": "rpm", "version": "4.9" } ], "tar": [ { "arch": "x86_64", "epoch": 2, "name": "tar", "release": "9.el8", "source": "rpm", "version": "1.30" } ], "tbb": [ { "arch": "x86_64", "epoch": null, "name": "tbb", "release": "9.el8", "source": "rpm", "version": "2018.2" } ], "tcl": [ { "arch": "x86_64", "epoch": 1, "name": "tcl", "release": "2.el8", "source": "rpm", "version": "8.6.8" } ], "teamd": [ { "arch": "x86_64", "epoch": null, "name": "teamd", "release": "4.el8", "source": "rpm", "version": "1.31" } ], "time": [ { "arch": "x86_64", "epoch": null, "name": "time", "release": "3.el8", "source": "rpm", "version": "1.9" } ], "timedatex": [ { "arch": "x86_64", "epoch": null, "name": "timedatex", "release": "3.el8", "source": "rpm", "version": "0.5" } ], "tpm2-tss": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tss", "release": "6.el8", "source": "rpm", "version": "2.3.2" } ], "trousers": [ { "arch": "x86_64", "epoch": null, "name": "trousers", "release": "2.el8", "source": "rpm", "version": "0.3.15" } ], "trousers-lib": [ { "arch": "x86_64", "epoch": null, "name": "trousers-lib", "release": "2.el8", "source": "rpm", "version": "0.3.15" } ], "tuned": [ { "arch": "noarch", "epoch": null, "name": "tuned", "release": "4.el8.1", "source": "rpm", "version": "2.22.1" } ], "tzdata": [ { "arch": "noarch", "epoch": null, "name": "tzdata", "release": "1.el8", "source": "rpm", "version": "2024a" } ], "unbound-libs": [ { "arch": "x86_64", "epoch": null, "name": "unbound-libs", "release": "5.el8", "source": "rpm", "version": "1.16.2" } ], "unzip": [ { "arch": "x86_64", "epoch": null, "name": "unzip", "release": "46.el8", "source": "rpm", "version": "6.0" } ], "util-linux": [ { "arch": "x86_64", "epoch": null, "name": "util-linux", "release": "46.el8", "source": "rpm", "version": "2.32.1" } ], "vim-common": [ { "arch": "x86_64", "epoch": 2, "name": "vim-common", "release": "19.el8.4", "source": "rpm", "version": "8.0.1763" } ], "vim-enhanced": [ { "arch": "x86_64", "epoch": 2, "name": "vim-enhanced", "release": "19.el8.4", "source": "rpm", "version": "8.0.1763" } ], "vim-filesystem": [ { "arch": "noarch", "epoch": 2, "name": "vim-filesystem", "release": "19.el8.4", "source": "rpm", "version": "8.0.1763" } ], "vim-minimal": [ { "arch": "x86_64", "epoch": 2, "name": "vim-minimal", "release": "19.el8.4", "source": "rpm", "version": "8.0.1763" } ], "virt-what": [ { "arch": "x86_64", "epoch": null, "name": "virt-what", "release": "4.el8", "source": "rpm", "version": "1.25" } ], "wget": [ { "arch": "x86_64", "epoch": null, "name": "wget", "release": "11.el8", "source": "rpm", "version": "1.19.5" } ], "which": [ { "arch": "x86_64", "epoch": null, "name": "which", "release": "20.el8", "source": "rpm", "version": "2.21" } ], "xfsprogs": [ { "arch": "x86_64", "epoch": null, "name": "xfsprogs", "release": "12.el8", "source": "rpm", "version": "5.0.0" } ], "xkeyboard-config": [ { "arch": "noarch", 
"epoch": null, "name": "xkeyboard-config", "release": "1.el8", "source": "rpm", "version": "2.28" } ], "xz": [ { "arch": "x86_64", "epoch": null, "name": "xz", "release": "4.el8", "source": "rpm", "version": "5.2.4" } ], "xz-devel": [ { "arch": "x86_64", "epoch": null, "name": "xz-devel", "release": "4.el8", "source": "rpm", "version": "5.2.4" } ], "xz-libs": [ { "arch": "x86_64", "epoch": null, "name": "xz-libs", "release": "4.el8", "source": "rpm", "version": "5.2.4" } ], "yum": [ { "arch": "noarch", "epoch": null, "name": "yum", "release": "20.el8", "source": "rpm", "version": "4.7.0" } ], "yum-utils": [ { "arch": "noarch", "epoch": null, "name": "yum-utils", "release": "25.el8", "source": "rpm", "version": "4.0.21" } ], "zip": [ { "arch": "x86_64", "epoch": null, "name": "zip", "release": "23.el8", "source": "rpm", "version": "3.0" } ], "zlib": [ { "arch": "x86_64", "epoch": null, "name": "zlib", "release": "25.el8", "source": "rpm", "version": "1.2.11" } ], "zlib-devel": [ { "arch": "x86_64", "epoch": null, "name": "zlib-devel", "release": "25.el8", "source": "rpm", "version": "1.2.11" } ] } }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Get Grafana version number from installed packages] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:36 Tuesday 19 November 2024 14:39:49 -0500 (0:00:02.113) 0:00:49.493 ****** ok: [managed-node3] => { "ansible_facts": { "grafana_version": "9.2.10" }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Template Grafana v8 and earlier configuration] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:40 Tuesday 19 November 2024 14:39:49 -0500 (0:00:00.078) 0:00:49.572 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "grafana_version is version('9.0.0', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Template Grafana v9 and later configuration] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:48 Tuesday 19 November 2024 14:39:49 -0500 (0:00:00.038) 0:00:49.611 ****** ok: [managed-node3] => { "changed": false, "checksum": "5cca05de69249344ed95e58493e10495e854415f", "dest": "/etc/grafana/grafana.ini", "gid": 988, "group": "grafana", "mode": "0640", "owner": "root", "path": "/etc/grafana/grafana.ini", "secontext": "system_u:object_r:grafana_conf_t:s0", "size": 47717, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Ensure Grafana configuration directory exists] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:56 Tuesday 19 November 2024 14:39:50 -0500 (0:00:01.032) 0:00:50.643 ****** ok: [managed-node3] => { "changed": false, "gid": 988, "group": "grafana", "mode": "0750", "owner": "root", "path": "/etc/grafana/provisioning/datasources", "secontext": "system_u:object_r:grafana_conf_t:s0", "size": 30, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Ensure Grafana service is configured with datasources] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:64 Tuesday 19 November 
2024 14:39:51 -0500 (0:00:00.571) 0:00:51.214 ****** ok: [managed-node3] => { "changed": false, "checksum": "34116c67b70835430a24bb4b09b96ea7eeeb3d9d", "dest": "/etc/grafana/provisioning/datasources/grafana-pcp.yaml", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/grafana/provisioning/datasources/grafana-pcp.yaml", "secontext": "system_u:object_r:grafana_conf_t:s0", "size": 473, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Ensure graphing service is running and enabled on boot] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:71 Tuesday 19 November 2024 14:39:52 -0500 (0:00:00.868) 0:00:52.083 ****** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "grafana-server", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:56 EST", "ActiveEnterTimestampMonotonic": "530002109", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:56 EST", "ActiveExitTimestampMonotonic": "529102114", "ActiveState": "active", "After": "basic.target sysinit.target postgresql.service -.mount systemd-tmpfiles-setup.service network-online.target system.slice tmp.mount systemd-journald.socket mysqld.service mariadb.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:56 EST", "AssertTimestampMonotonic": "529148785", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:56 EST", "ConditionTimestampMonotonic": "529148784", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/grafana-server.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Grafana instance", "DevicePolicy": "closed", "Documentation": "http://docs.grafana.org", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/grafana-server (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "34903", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:56 EST", "ExecMainStartTimestampMonotonic": "529150395", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/grafana-server ; argv[]=/usr/sbin/grafana-server --config=${CONF_FILE} --pidfile=${PID_FILE_DIR}/grafana-server.pid --packaging=rpm cfg:default.paths.logs=${LOG_DIR} cfg:default.paths.data=${DATA_DIR} cfg:default.paths.plugins=${PLUGINS_DIR} cfg:default.paths.provisioning=${PROVISIONING_CFG_DIR} ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:38:56 EST] ; stop_time=[n/a] ; pid=34903 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/grafana-server.service", "FreezerState": "running", "GID": 
"988", "Group": "grafana", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "grafana-server.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:56 EST", "InactiveEnterTimestampMonotonic": "529147294", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:56 EST", "InactiveExitTimestampMonotonic": "529150440", "InvocationID": "e1343c787b8d4eac8e04d430f667f8e0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "10000", "LimitNOFILESoft": "10000", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "34903", "MemoryAccounting": "yes", "MemoryCurrent": "63037440", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "grafana-server.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "yes", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "yes", "PrivateUsers": "no", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "yes", "ProtectSystem": "full", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "yes", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/var/tmp /usr/share/grafana /run/grafana", "Restart": "on-failure", "RestartUSec": "100ms", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectory": "grafana", "RuntimeDirectoryMode": "0750", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": 
"10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:56 EST", "StateChangeTimestampMonotonic": "530002109", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "13", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "20s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "988", "UMask": "0027", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "grafana", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:56 EST", "WatchdogTimestampMonotonic": "530002105", "WatchdogUSec": "0", "WorkingDirectory": "/usr/share/grafana" } } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Ensure graphing service settings are configured by HTTP POST] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:78 Tuesday 19 November 2024 14:39:52 -0500 (0:00:00.684) 0:00:52.767 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "grafana_version is version('7.5.0', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_grafana : Ensure graphing service settings are configured by provisioning] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:89 Tuesday 19 November 2024 14:39:52 -0500 (0:00:00.051) 0:00:52.819 ****** ok: [managed-node3] => { "changed": false, "checksum": "9e0223cef1828dcdd1326014cc4532b478a526d1", "dest": "/etc/grafana/provisioning/plugins/grafana-pcp.yaml", "gid": 988, "group": "grafana", "mode": "0640", "owner": "root", "path": "/etc/grafana/provisioning/plugins/grafana-pcp.yaml", "secontext": "system_u:object_r:grafana_conf_t:s0", "size": 165, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.metrics : Configure firewall] ****************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:102 Tuesday 19 November 2024 14:39:53 -0500 (0:00:01.020) 0:00:53.839 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Initialize __metrics_firewall] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:9 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.183) 0:00:54.023 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_firewall": [] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Port for pmcd] *********************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:13 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.060) 0:00:54.084 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_firewall": [ { "port": "44321/tcp", "state": "enabled" } ] }, "changed": false } TASK 
[fedora.linux_system_roles.metrics : Port for pmproxy used by query and grafana] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:19 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.059) 0:00:54.143 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_firewall": [ { "port": "44321/tcp", "state": "enabled" }, { "port": "44322/tcp", "state": "enabled" } ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Service for grafana] ***************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:25 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.069) 0:00:54.212 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_firewall": [ { "port": "44321/tcp", "state": "enabled" }, { "port": "44322/tcp", "state": "enabled" }, { "service": "grafana", "state": "enabled" } ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Service for valkey] ****************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:31 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.084) 0:00:54.297 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_facts['distribution_version'] is version('10', '>=')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for redis] ******************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:38 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.079) 0:00:54.377 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_firewall": [ { "port": "44321/tcp", "state": "enabled" }, { "port": "44322/tcp", "state": "enabled" }, { "service": "grafana", "state": "enabled" }, { "service": "redis", "state": "enabled" } ] }, "changed": false } TASK [Ensure the service and the port status with the firewall role] *********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:45 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.069) 0:00:54.446 ****** TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.106) 0:00:54.553 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.106) 0:00:54.660 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Tuesday 19 November 2024 14:39:54 -0500 (0:00:00.065) 0:00:54.725 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: 
/tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Tuesday 19 November 2024 14:39:55 -0500 (0:00:00.601) 0:00:55.326 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Tuesday 19 November 2024 14:39:55 -0500 (0:00:00.082) 0:00:55.409 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Tuesday 19 November 2024 14:39:56 -0500 (0:00:00.593) 0:00:56.002 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Tuesday 19 November 2024 14:39:56 -0500 (0:00:00.265) 0:00:56.268 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Tuesday 19 November 2024 14:39:59 -0500 (0:00:03.171) 0:00:59.439 ****** skipping: [managed-node3] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Tuesday 19 November 2024 14:39:59 -0500 (0:00:00.096) 0:00:59.536 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Tuesday 19 November 2024 14:39:59 -0500 (0:00:00.110) 0:00:59.647 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Tuesday 19 November 2024 14:39:59 -0500 (0:00:00.130) 0:00:59.777 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Tuesday 19 November 2024 14:39:59 -0500 (0:00:00.073) 0:00:59.851 ****** skipping: [managed-node3] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } 
skipping: [managed-node3] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Tuesday 19 November 2024 14:40:00 -0500 (0:00:00.210) 0:01:00.062 ****** ok: [managed-node3] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service sysinit.target basic.target dbus.socket system.slice dbus.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ebtables.service ipset.service ip6tables.service nftables.service shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", 
"StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Tuesday 19 November 2024 14:40:00 -0500 (0:00:00.773) 0:01:00.835 ****** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service sysinit.target system.slice dbus.service dbus.socket basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "nftables.service iptables.service ipset.service shutdown.target ebtables.service ip6tables.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld 
--nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", 
"StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Tuesday 19 November 2024 14:40:02 -0500 (0:00:01.258) 0:01:02.094 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Tuesday 19 November 2024 14:40:02 -0500 (0:00:00.125) 0:01:02.220 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Tuesday 19 November 2024 14:40:02 -0500 (0:00:00.098) 0:01:02.318 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Tuesday 19 November 2024 14:40:02 -0500 (0:00:00.097) 0:01:02.416 ****** changed: [managed-node3] => (item={'port': '44321/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "44321/tcp", "state": "enabled" } } changed: [managed-node3] => (item={'port': '44322/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "44322/tcp", "state": "enabled" } } changed: [managed-node3] => (item={'service': 'grafana', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "service": "grafana", "state": "enabled" } } changed: [managed-node3] => (item={'service': 'redis', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "service": "redis", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Tuesday 19 November 2024 14:40:05 -0500 (0:00:03.411) 0:01:05.828 
****** skipping: [managed-node3] => (item={'port': '44321/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "44321/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item={'port': '44322/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "44322/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item={'service': 'grafana', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "service": "grafana", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item={'service': 'redis', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "service": "redis", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Tuesday 19 November 2024 14:40:05 -0500 (0:00:00.124) 0:01:05.953 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.061) 0:01:06.014 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.061) 0:01:06.075 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.110) 0:01:06.186 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.056) 0:01:06.242 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.058) 0:01:06.300 
****** skipping: [managed-node3] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.metrics : Configure selinux] ******************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:105 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.085) 0:01:06.386 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Set pcp_bind_all_unreserved_ports] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:6 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.118) 0:01:06.504 ****** ok: [managed-node3] => { "ansible_facts": { "__metrics_selinux": [ [ { "name": "pcp_bind_all_unreserved_ports", "state": "on" } ] ] }, "changed": false } TASK [Ensure the port status with the selinux role] **************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:11 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.052) 0:01:06.557 ****** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.120) 0:01:06.678 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.128) 0:01:06.806 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.057) 0:01:06.863 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Tuesday 19 November 2024 14:40:06 -0500 (0:00:00.085) 0:01:06.948 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Tuesday 19 November 2024 14:40:07 -0500 (0:00:00.679) 0:01:07.628 ****** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in 
/sbin] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Tuesday 19 November 2024 14:40:07 -0500 (0:00:00.054) 0:01:07.682 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Tuesday 19 November 2024 14:40:08 -0500 (0:00:00.544) 0:01:08.226 ****** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Tuesday 19 November 2024 14:40:08 -0500 (0:00:00.053) 0:01:08.279 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Tuesday 19 November 2024 14:40:08 -0500 (0:00:00.045) 0:01:08.324 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Tuesday 19 November 2024 14:40:11 -0500 (0:00:03.066) 0:01:11.391 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Tuesday 19 November 2024 14:40:11 -0500 (0:00:00.048) 0:01:11.439 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Tuesday 19 November 2024 14:40:14 -0500 (0:00:03.064) 0:01:14.503 ****** skipping: [managed-node3] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Tuesday 19 November 2024 14:40:14 -0500 (0:00:00.039) 0:01:14.542 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Tuesday 19 November 2024 14:40:14 -0500 (0:00:00.041) 0:01:14.584 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Tuesday 19 November 2024 14:40:14 -0500 (0:00:00.040) 0:01:14.625 ****** ok: [managed-node3] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.941) 0:01:15.566 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.038) 0:01:15.604 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.047) 0:01:15.651 ****** ok: [managed-node3] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.048) 0:01:15.700 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.041) 0:01:15.742 ****** skipping: [managed-node3] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.076) 0:01:15.818 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.038) 0:01:15.856 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.039) 0:01:15.896 ****** skipping: [managed-node3] => { "changed": false, "false_condition": 
"selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Tuesday 19 November 2024 14:40:15 -0500 (0:00:00.038) 0:01:15.935 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.038) 0:01:15.973 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.039) 0:01:16.013 ****** redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean changed: [managed-node3] => (item={'name': 'pcp_bind_all_unreserved_ports', 'state': 'on'}) => { "__selinux_item": { "name": "pcp_bind_all_unreserved_ports", "state": "on" }, "ansible_loop_var": "__selinux_item", "changed": true, "name": "pcp_bind_all_unreserved_ports", "persistent": false, "state": true } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.734) 0:01:16.747 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.037) 0:01:16.784 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.056) 0:01:16.841 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Tuesday 19 November 2024 14:40:16 -0500 (0:00:00.066) 0:01:16.907 ****** ok: [managed-node3] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": 
"sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": 
"sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { 
"checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { 
"checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { 
"checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "grafana": { "400": { "checksum": "sha256:cfb9c7aa0d74eaaf0b7c414cf4e091c408ac40a4a57b2e3ffa2ef588c560ae22", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": 
"sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": 
"sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { 
"100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": 
"sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 }, "200": { "checksum": "sha256:d0dac3458c9eec819a431bbf3c9b38251bf288317331817fba9be57a19dc054a", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": 
"sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { 
"checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": 
"sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": 
"sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { 
"checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { 
"checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": 
"sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Tuesday 19 November 2024 14:40:20 -0500 (0:00:03.658) 0:01:20.566 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Tuesday 19 November 2024 14:40:20 -0500 (0:00:00.054) 0:01:20.620 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Tuesday 19 November 2024 14:40:20 -0500 (0:00:00.046) 0:01:20.666 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check if all services work] ********************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:33 Tuesday 19 November 2024 14:40:20 -0500 (0:00:00.145) 0:01:20.812 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pcp.yml for managed-node3 => (item=check_pcp.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmlogger.yml for managed-node3 => (item=check_pmlogger.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmie.yml for managed-node3 => (item=check_pmie.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_keyserver.yml for managed-node3 => (item=check_keyserver.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmproxy.yml for managed-node3 => (item=check_pmproxy.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_grafana.yml for managed-node3 => (item=check_grafana.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_grafanapcp.yml for managed-node3 => (item=check_grafanapcp.yml) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml for managed-node3 => (item=check_firewall_selinux.yml) TASK [Check if PCP works] ****************************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pcp.yml:3 Tuesday 19 November 2024 14:40:20 -0500 (0:00:00.120) 0:01:20.932 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "pcp" ], "delta": "0:00:00.106412", "end": "2024-11-19 14:40:21.502365", "rc": 0, "start": "2024-11-19 14:40:21.395953" } STDOUT: Performance Co-Pilot configuration on managed-node3: platform: Linux managed-node3 4.18.0-553.5.1.el8.x86_64 #1 SMP Tue May 21 05:46:01 UTC 2024 x86_64 hardware: 2 cpus, 1 disk, 1 node, 3539MB RAM timezone: EST+5 services: pmcd pmproxy pmcd: Version 5.3.7-20, 13 agents, 6 
clients pmda: root pmcd proc pmproxy xfs linux nfsclient mmv kvm jbd2 dm openmetrics bpftrace pmlogger: primary logger: /var/log/pcp/pmlogger/managed-node3/20241119.14.38-00 pmie: primary engine: /var/log/pcp/pmie/managed-node3/pmie.log TASK [Check if pmlogger is running] ******************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmlogger.yml:3 Tuesday 19 November 2024 14:40:21 -0500 (0:00:00.646) 0:01:21.579 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -euo pipefail\npmprobe -I pmcd.pmlogger.pmcd_host | grep '\"primary\"'\n", "delta": "0:00:00.009711", "end": "2024-11-19 14:40:22.059035", "rc": 0, "start": "2024-11-19 14:40:22.049324" } STDOUT: pmcd.pmlogger.pmcd_host 2 "34408" "primary" TASK [Check the ansible_managed header in the configuration file] ************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmlogger.yml:10 Tuesday 19 November 2024 14:40:22 -0500 (0:00:00.573) 0:01:22.153 ****** [WARNING]: TASK: Check the ansible_managed header in the configuration file: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml for managed-node3 => (item=/etc/sysconfig/pmlogger) included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml for managed-node3 => (item=/etc/sysconfig/pmlogger_timers) TASK [Grep the ansible_managed header in /etc/sysconfig/pmlogger] ************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml:3 Tuesday 19 November 2024 14:40:22 -0500 (0:00:00.086) 0:01:22.239 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "^# Ansible managed", "/etc/sysconfig/pmlogger" ], "delta": "0:00:00.002981", "end": "2024-11-19 14:40:22.709545", "rc": 0, "start": "2024-11-19 14:40:22.706564" } STDOUT: # Ansible managed TASK [Grep the ansible_managed header in /etc/sysconfig/pmlogger_timers] ******* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml:3 Tuesday 19 November 2024 14:40:22 -0500 (0:00:00.545) 0:01:22.785 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "^# Ansible managed", "/etc/sysconfig/pmlogger_timers" ], "delta": "0:00:00.003087", "end": "2024-11-19 14:40:23.255419", "rc": 0, "start": "2024-11-19 14:40:23.252332" } STDOUT: # Ansible managed TASK [Check if primary pmie is running] **************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmie.yml:3 Tuesday 19 November 2024 14:40:23 -0500 (0:00:00.582) 0:01:23.367 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -euo pipefail\npmprobe -I pmcd.pmie.pmcd_host | grep '\"primary\"'\n", "delta": "0:00:00.009765", "end": "2024-11-19 14:40:23.849130", "rc": 0, "start": "2024-11-19 14:40:23.839365" } STDOUT: pmcd.pmie.pmcd_host 2 "primary" "45423" TASK [Check if Valkey responds] ************************************************ task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_keyserver.yml:3 Tuesday 19 November 2024 14:40:23 -0500 (0:00:00.556) 0:01:23.924 ****** skipping: [managed-node3] => { 
"changed": false, "false_condition": "(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version | int >= 10) or ansible_distribution not in ['RedHat', 'CentOS']", "skip_reason": "Conditional result was False" } TASK [Check if Redis responds] ************************************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_keyserver.yml:10 Tuesday 19 November 2024 14:40:24 -0500 (0:00:00.050) 0:01:23.974 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -euo pipefail\nredis-cli PING | grep PONG\n", "delta": "0:00:00.007598", "end": "2024-11-19 14:40:24.448013", "rc": 0, "start": "2024-11-19 14:40:24.440415" } STDOUT: PONG TASK [Check if pmproxy responds] *********************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmproxy.yml:3 Tuesday 19 November 2024 14:40:24 -0500 (0:00:00.547) 0:01:24.522 ****** ok: [managed-node3] => { "access_control_allow_headers": "Accept, Accept-Language, Content-Language, Content-Type", "access_control_allow_origin": "*", "access_control_max_age": "86400", "changed": false, "connection": "Keep-Alive", "content_length": "18", "content_type": "application/octet-stream", "cookies": {}, "cookies_string": "", "date": "Tue, 19 Nov 2024 19:40:25 GMT", "elapsed": 0, "redirected": false, "status": 200, "url": "http://localhost:44322/series/ping" } MSG: OK (18 bytes) TASK [Check the ansible_managed header in the configuration file] ************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_pmproxy.yml:9 Tuesday 19 November 2024 14:40:25 -0500 (0:00:00.841) 0:01:25.364 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml for managed-node3 TASK [Grep the ansible_managed header in /etc/sysconfig/pmproxy] *************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml:3 Tuesday 19 November 2024 14:40:25 -0500 (0:00:00.081) 0:01:25.446 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "^# Ansible managed", "/etc/sysconfig/pmproxy" ], "delta": "0:00:00.003032", "end": "2024-11-19 14:40:25.907477", "rc": 0, "start": "2024-11-19 14:40:25.904445" } STDOUT: # Ansible managed TASK [Check if Grafana works] ************************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_grafana.yml:3 Tuesday 19 November 2024 14:40:26 -0500 (0:00:00.532) 0:01:25.978 ****** ok: [managed-node3] => { "cache_control": "no-cache", "changed": false, "connection": "close", "content_type": "text/html; charset=UTF-8", "cookies": {}, "cookies_string": "", "date": "Tue, 19 Nov 2024 19:40:26 GMT", "elapsed": 0, "expires": "-1", "pragma": "no-cache", "redirected": false, "status": 200, "transfer_encoding": "chunked", "url": "http://localhost:3000/login", "x_content_type_options": "nosniff", "x_frame_options": "deny", "x_xss_protection": "1; mode=block" } MSG: OK (unknown bytes) TASK [Check if grafana-pcp is installed] *************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_grafanapcp.yml:3 Tuesday 19 November 2024 14:40:26 -0500 (0:00:00.701) 0:01:26.680 ****** ok: [managed-node3] => { "changed": false, "cmd": "set -euo pipefail\n(cd /tmp && /usr/sbin/grafana-cli plugins ls) | 
grep performancecopilot\n", "delta": "0:00:00.125565", "end": "2024-11-19 14:40:27.264233", "rc": 0, "start": "2024-11-19 14:40:27.138668" } STDOUT: performancecopilot-pcp-app @ 5.1.1 TASK [Check the ansible_managed header in grafana.ini] ************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_grafanapcp.yml:9 Tuesday 19 November 2024 14:40:27 -0500 (0:00:00.657) 0:01:27.337 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml for managed-node3 TASK [Grep the ansible_managed header in /etc/grafana/grafana.ini] ************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_header.yml:3 Tuesday 19 November 2024 14:40:27 -0500 (0:00:00.120) 0:01:27.457 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "grep", "^# Ansible managed", "/etc/grafana/grafana.ini" ], "delta": "0:00:00.003253", "end": "2024-11-19 14:40:27.922896", "rc": 0, "start": "2024-11-19 14:40:27.919643" } STDOUT: # Ansible managed TASK [Check firewall service status for grafana; metrics_manage_firewall is true] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:11 Tuesday 19 November 2024 14:40:28 -0500 (0:00:00.539) 0:01:27.997 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "firewall-cmd", "--list-services" ], "delta": "0:00:00.264724", "end": "2024-11-19 14:40:28.751665", "failed_when_result": false, "rc": 0, "start": "2024-11-19 14:40:28.486941" } STDOUT: cockpit dhcpv6-client grafana redis ssh TASK [Check firewall service status for redis; metrics_manage_firewall is true] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:34 Tuesday 19 November 2024 14:40:28 -0500 (0:00:00.836) 0:01:28.833 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "firewall-cmd", "--list-services" ], "delta": "0:00:00.258733", "end": "2024-11-19 14:40:29.588013", "failed_when_result": false, "rc": 0, "start": "2024-11-19 14:40:29.329280" } STDOUT: cockpit dhcpv6-client grafana redis ssh TASK [Check firewall port status for pmproxy; metrics_manage_firewall is true] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:45 Tuesday 19 November 2024 14:40:29 -0500 (0:00:00.844) 0:01:29.677 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.260008", "end": "2024-11-19 14:40:30.430774", "failed_when_result": false, "rc": 0, "start": "2024-11-19 14:40:30.170766" } STDOUT: 44321/tcp 44322/tcp TASK [Check firewall port status for pmcd; metrics_manage_firewall is true] **** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:54 Tuesday 19 November 2024 14:40:30 -0500 (0:00:00.832) 0:01:30.510 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.267855", "end": "2024-11-19 14:40:31.268462", "failed_when_result": false, "rc": 0, "start": "2024-11-19 14:40:31.000607" } STDOUT: 44321/tcp 44322/tcp TASK [Check firewall port status for keyserver; metrics_manage_firewall is true] *** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:60 Tuesday 19 November 2024 14:40:31 -0500 (0:00:00.857) 0:01:31.368 
****** fatal: [managed-node3]: FAILED! => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.264008", "end": "2024-11-19 14:40:32.118368", "failed_when_result": true, "rc": 0, "start": "2024-11-19 14:40:31.854360" } STDOUT: 44321/tcp 44322/tcp TASK [Handle failure case] ***************************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:49 Tuesday 19 November 2024 14:40:32 -0500 (0:00:00.830) 0:01:32.198 ****** included: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node3 TASK [Collect logs] ************************************************************ task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Tuesday 19 November 2024 14:40:32 -0500 (0:00:00.085) 0:01:32.283 ****** ok: [managed-node3] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.031564", "end": "2024-11-19 14:40:32.775637", "rc": 0, "start": "2024-11-19 14:40:32.744073" } STDOUT: -- Logs begin at Tue 2024-11-19 14:30:07 EST, end at Tue 2024-11-19 14:40:32 EST. -- Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com sshd[5109]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com sshd[5110]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com sshd[5127]: Received disconnect from 10.31.9.220 port 42918:11: disconnected by user Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com sshd[5127]: Disconnected from user root 10.31.9.220 port 42918 Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com sshd[5110]: pam_unix(sshd:session): session closed for user root Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-4.scope has successfully entered the 'dead' state. Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com systemd-logind[617]: Session 4 logged out. Waiting for processes to exit. Nov 19 14:33:58 ip-10-31-44-254.us-east-1.aws.redhat.com systemd-logind[617]: Removed session 4. -- Subject: Session 4 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 4 has been terminated. Nov 19 14:35:01 ip-10-31-44-254.us-east-1.aws.redhat.com dbus-daemon[615]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.25' (uid=0 pid=6551 comm="hostnamectl set-hostname managed-node3 " label="unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023") Nov 19 14:35:01 ip-10-31-44-254.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. 
Nov 19 14:35:02 ip-10-31-44-254.us-east-1.aws.redhat.com dbus-daemon[615]: [system] Successfully activated service 'org.freedesktop.hostname1' Nov 19 14:35:02 ip-10-31-44-254.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. Nov 19 14:35:02 managed-node3 systemd-hostnamed[6552]: Changed static host name to 'managed-node3' Nov 19 14:35:02 managed-node3 systemd-hostnamed[6552]: Changed host name to 'managed-node3' Nov 19 14:35:02 managed-node3 NetworkManager[661]: [1732044902.0885] hostname: static hostname changed from "ip-10-31-44-254.us-east-1.aws.redhat.com" to "managed-node3" Nov 19 14:35:02 managed-node3 dbus-daemon[615]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Nov 19 14:35:02 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Nov 19 14:35:02 managed-node3 NetworkManager[661]: [1732044902.0964] policy: set-hostname: set hostname to 'managed-node3' (from system configuration) Nov 19 14:35:02 managed-node3 dbus-daemon[615]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Nov 19 14:35:02 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Nov 19 14:35:12 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Nov 19 14:35:32 managed-node3 systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 19 14:36:07 managed-node3 sshd[7112]: Accepted publickey for root from 10.31.47.166 port 47974 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 19 14:36:07 managed-node3 systemd-logind[617]: New session 6 of user root. -- Subject: A new session 6 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 6 has been created for the user root. -- -- The leading process of the session is 7112. Nov 19 14:36:07 managed-node3 systemd[1]: Started Session 6 of user root. -- Subject: Unit session-6.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-6.scope has finished starting up. -- -- The start-up result is done. 
Nov 19 14:36:07 managed-node3 sshd[7112]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 19 14:36:08 managed-node3 platform-python[7257]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 19 14:36:09 managed-node3 platform-python[7409]: ansible-service_facts Invoked Nov 19 14:36:11 managed-node3 platform-python[7618]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:36:11 managed-node3 platform-python[7741]: ansible-ansible.legacy.dnf Invoked with name=['pcp-pmda-bpftrace', 'bpftrace'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:36:36 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:36:36 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:36:55 managed-node3 kernel: SELinux: Converting 380 SID table entries... Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability open_perms=1 Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability always_check_network=0 Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 19 14:36:55 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 19 14:36:55 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:36:55 managed-node3 groupadd[8537]: group added to /etc/group: name=pcp, GID=991 Nov 19 14:36:56 managed-node3 groupadd[8537]: group added to /etc/gshadow: name=pcp Nov 19 14:36:56 managed-node3 groupadd[8537]: new group: name=pcp, GID=991 Nov 19 14:36:56 managed-node3 useradd[8544]: new user: name=pcp, UID=994, GID=991, home=/var/lib/pcp, shell=/sbin/nologin Nov 19 14:37:04 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r8928294d897440c49f2571fee9fdf9fe.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r8928294d897440c49f2571fee9fdf9fe.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:04 managed-node3 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details. Nov 19 14:37:04 managed-node3 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 19 14:37:04 managed-node3 systemd[1]: Reloading. Nov 19 14:37:05 managed-node3 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. 
Nov 19 14:37:05 managed-node3 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:05 managed-node3 systemd[1]: run-r8928294d897440c49f2571fee9fdf9fe.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r8928294d897440c49f2571fee9fdf9fe.service has successfully entered the 'dead' state. Nov 19 14:37:06 managed-node3 platform-python[10126]: ansible-file Invoked with path=/etc/pcp/bpftrace state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:07 managed-node3 platform-python[10327]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/bpftrace/bpftrace.conf follow=True get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:07 managed-node3 platform-python[10493]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045026.6807578-8978-204637629594688/source dest=/etc/pcp/bpftrace/bpftrace.conf mode=0600 follow=True _original_basename=bpftrace.conf.j2 checksum=2559785fc812966eeb50dc5f6f22139195666360 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:08 managed-node3 platform-python[10696]: ansible-ansible.legacy.dnf Invoked with name=['redis'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:37:12 managed-node3 groupadd[10704]: group added to /etc/group: name=redis, GID=990 Nov 19 14:37:12 managed-node3 groupadd[10704]: group added to /etc/gshadow: name=redis Nov 19 14:37:12 managed-node3 groupadd[10704]: new group: name=redis, GID=990 Nov 19 14:37:12 managed-node3 useradd[10711]: new user: name=redis, UID=993, GID=990, home=/var/lib/redis, shell=/sbin/nologin Nov 19 14:37:12 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r75bf36d32663470dbd2ad2a3308b3dd9.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r75bf36d32663470dbd2ad2a3308b3dd9.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:12 managed-node3 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 19 14:37:12 managed-node3 systemd[1]: Reloading. 
Nov 19 14:37:13 managed-node3 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 19 14:37:13 managed-node3 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:13 managed-node3 systemd[1]: run-r75bf36d32663470dbd2ad2a3308b3dd9.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r75bf36d32663470dbd2ad2a3308b3dd9.service has successfully entered the 'dead' state. Nov 19 14:37:14 managed-node3 platform-python[11271]: ansible-file Invoked with path=/etc/redis state=directory owner=redis group=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:14 managed-node3 platform-python[11472]: ansible-ansible.legacy.stat Invoked with path=/etc/redis/redis.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:15 managed-node3 platform-python[11636]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045034.4558065-9310-205189171381882/source dest=/etc/redis/redis.conf mode=0640 owner=redis group=root follow=False _original_basename=CentOS_8_keyserver.conf.j2 checksum=06d89d7886a9a4126fb9baddfd6c1c2e3ab5271a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:15 managed-node3 platform-python[11839]: ansible-file Invoked with src=/etc/redis/redis.conf dest=/etc/redis.conf state=link force=True path=/etc/redis.conf recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:16 managed-node3 platform-python[12040]: ansible-ansible.legacy.systemd Invoked with name=redis state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:16 managed-node3 systemd[1]: Reloading. Nov 19 14:37:16 managed-node3 systemd[1]: Starting Redis persistent key-value database... -- Subject: Unit redis.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has begun starting up. Nov 19 14:37:16 managed-node3 systemd[1]: Started Redis persistent key-value database. -- Subject: Unit redis.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has finished starting up. -- -- The start-up result is done. 
Nov 19 14:37:17 managed-node3 platform-python[12274]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:37:21 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun starting up. Nov 19 14:37:21 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:21 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Nov 19 14:37:21 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun starting up. Nov 19 14:37:21 managed-node3 pmlogger[12396]: /usr/libexec/pcp/lib/pmlogger: Warning: Performance Co-Pilot archive logger(s) not permanently enabled. Nov 19 14:37:21 managed-node3 pmlogger[12396]: To enable pmlogger, run the following as root: Nov 19 14:37:21 managed-node3 pmlogger[12396]: # /usr/bin/systemctl enable pmlogger.service Nov 19 14:37:21 managed-node3 pmcd[12581]: Installing dm PMDA ... Nov 19 14:37:21 managed-node3 pmie[12398]: /usr/libexec/pcp/lib/pmie: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled. Nov 19 14:37:21 managed-node3 pmie[12398]: To enable pmie, run the following as root: Nov 19 14:37:21 managed-node3 pmie[12398]: # /usr/bin/systemctl enable pmie.service Nov 19 14:37:22 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Started Daily processing of PMIE logs. -- Subject: Unit pmie_daily.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_daily.timer has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Starting pmie farm service... -- Subject: Unit pmie_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun starting up. Nov 19 14:37:22 managed-node3 systemd[1]: Started Half-hourly check of PMIE instances. -- Subject: Unit pmie_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.timer has finished starting up. 
-- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Starting Check PMIE instances are running... -- Subject: Unit pmie_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.service has begun starting up. Nov 19 14:37:22 managed-node3 systemd[1]: Started pmie farm service. -- Subject: Unit pmie_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Started Check PMIE instances are running. -- Subject: Unit pmie_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Started Half-hourly check of pmie farm instances. -- Subject: Unit pmie_farm_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.timer has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Starting Check and migrate non-primary pmie farm instances... -- Subject: Unit pmie_farm_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.service has begun starting up. Nov 19 14:37:22 managed-node3 systemd[1]: Started Check and migrate non-primary pmie farm instances. -- Subject: Unit pmie_farm_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: pmie_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_check.service has successfully entered the 'dead' state. Nov 19 14:37:22 managed-node3 systemd[1]: pmie_farm_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_farm_check.service has successfully entered the 'dead' state. Nov 19 14:37:22 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Started Daily processing of archive logs. -- Subject: Unit pmlogger_daily.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_daily.timer has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Started Half-hourly check of pmlogger instances. -- Subject: Unit pmlogger_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.timer has finished starting up. -- -- The start-up result is done. Nov 19 14:37:22 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. 
Nov 19 14:37:22 managed-node3 systemd[1]: Starting Check pmlogger instances are running... -- Subject: Unit pmlogger_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.service has begun starting up. Nov 19 14:37:22 managed-node3 systemd[1]: Reloading. Nov 19 14:37:23 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:23 managed-node3 systemd[1]: Started Half-hourly check of pmlogger farm instances. -- Subject: Unit pmlogger_farm_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.timer has finished starting up. -- -- The start-up result is done. Nov 19 14:37:23 managed-node3 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances... -- Subject: Unit pmlogger_farm_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.service has begun starting up. Nov 19 14:37:23 managed-node3 systemd[1]: Started Check pmlogger instances are running. -- Subject: Unit pmlogger_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:23 managed-node3 systemd[1]: Started Check and migrate non-primary pmlogger farm instances. -- Subject: Unit pmlogger_farm_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:23 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r39305905e3ff408e8a39d7eaae94e081.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r39305905e3ff408e8a39d7eaae94e081.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:23 managed-node3 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 19 14:37:23 managed-node3 systemd[1]: Reloading. Nov 19 14:37:23 managed-node3 systemd[1]: pmlogger_farm_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm_check.service has successfully entered the 'dead' state. Nov 19 14:37:23 managed-node3 systemd[1]: pmlogger_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_check.service has successfully entered the 'dead' state. 
Nov 19 14:37:25 managed-node3 platform-python[16032]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:37:26 managed-node3 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 19 14:37:26 managed-node3 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:26 managed-node3 systemd[1]: run-r39305905e3ff408e8a39d7eaae94e081.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r39305905e3ff408e8a39d7eaae94e081.service has successfully entered the 'dead' state. Nov 19 14:37:29 managed-node3 platform-python[17997]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:29 managed-node3 kernel: device-mapper: uevent: version 1.0.3 Nov 19 14:37:29 managed-node3 kernel: device-mapper: ioctl: 4.46.0-ioctl (2022-02-22) initialised: dm-devel@redhat.com Nov 19 14:37:29 managed-node3 pmcd[18069]: Installing nfsclient PMDA ... 
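The dnf invocation logged above installs the SASL libraries pmcd needs for authenticated client connections, and the command invocation that follows simply reads /etc/pcp/pmcd.conf to inspect the active PMDA configuration. A hedged reconstruction of the kind of tasks that produce these entries (task names and the changed_when idiom are illustrative; package names and the command come straight from the log):

- name: Install SASL packages required by pmcd
  ansible.builtin.dnf:
    name:
      - cyrus-sasl-lib
      - cyrus-sasl-scram
    state: present

- name: Inspect the pmcd PMDA configuration
  ansible.builtin.command: cat /etc/pcp/pmcd.conf
  register: pmcd_conf
  changed_when: false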
Nov 19 14:37:30 managed-node3 platform-python[18290]: ansible-file Invoked with path=/var/lib/pcp/pmdas/bpftrace/.NeedInstall mode=u=rw,g=r,o=r state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:30 managed-node3 platform-python[18558]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:31 managed-node3 platform-python[18759]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:32 managed-node3 platform-python[19055]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:32 managed-node3 platform-python[19219]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045051.6211305-10063-91150682976779/source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:33 managed-node3 platform-python[19427]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:33 managed-node3 platform-python[19591]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045052.6059096-10108-154795874752075/source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:33 managed-node3 pmcd[19672]: Installing openmetrics PMDA ... Nov 19 14:37:33 managed-node3 pmcd[19771]: [Tue Nov 19 14:37:33] pmdaopenmetrics(19771) Info: Initializing ... currently in notready state. Nov 19 14:37:33 managed-node3 pmcd[19771]: [Tue Nov 19 14:37:33] pmdaopenmetrics(19771) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... 
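The file and copy invocations above create /etc/pcp/labels and /etc/pcp/labels/optional and install an ansible-managed label file in each, rendered from pmcd.explicit.labels.j2 and pmcd.implicit.labels.j2. A condensed sketch of equivalent tasks, under the assumption that a template task stands in for the logged stat/copy pair (paths, modes, and template names are read off the log; the loop structure is added for brevity):

- name: Ensure PCP label directories exist
  ansible.builtin.file:
    path: "{{ item }}"
    state: directory
    owner: root
    group: root
    mode: '0755'
  loop:
    - /etc/pcp/labels
    - /etc/pcp/labels/optional

- name: Install the explicit context labels file
  ansible.builtin.template:
    src: pmcd.explicit.labels.j2
    dest: /etc/pcp/labels/ansible-managed
    mode: '0644'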
Nov 19 14:37:33 managed-node3 pmcd[19771]: [Tue Nov 19 14:37:33] pmdaopenmetrics(19771) Info: Found source grafana cluster 1 Nov 19 14:37:33 managed-node3 pmcd[19771]: [Tue Nov 19 14:37:33] pmdaopenmetrics(19771) Info: Ready to process requests Nov 19 14:37:34 managed-node3 platform-python[19844]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:34 managed-node3 pmcd[19845]: [Tue Nov 19 14:37:34] pmdaopenmetrics(19845) Info: Initializing ... currently in notready state. Nov 19 14:37:34 managed-node3 pmcd[19845]: [Tue Nov 19 14:37:34] pmdaopenmetrics(19845) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... Nov 19 14:37:34 managed-node3 pmcd[19845]: [Tue Nov 19 14:37:34] pmdaopenmetrics(19845) Info: Found source grafana cluster 1 Nov 19 14:37:34 managed-node3 pmcd[19845]: [Tue Nov 19 14:37:34] pmdaopenmetrics(19845) Info: Ready to process requests Nov 19 14:37:34 managed-node3 platform-python[20097]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045053.6520653-10152-234223726502543/source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:35 managed-node3 platform-python[20300]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 19 14:37:35 managed-node3 useradd[20396]: new group: name=metrics, GID=989 Nov 19 14:37:35 managed-node3 useradd[20396]: new user: name=metrics, UID=992, GID=989, home=/home/metrics, shell=/bin/bash Nov 19 14:37:36 managed-node3 platform-python[20607]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:36 managed-node3 sasldblistusers2[20612]: _sasldb_getkeyhandle has failed Nov 19 14:37:36 managed-node3 saslpasswd2[20615]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Nov 19 14:37:36 managed-node3 saslpasswd2[20615]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Nov 19 14:37:36 managed-node3 saslpasswd2[20615]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Nov 19 14:37:37 managed-node3 platform-python[20822]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:37 managed-node3 platform-python[21001]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045056.6172512-10399-74776935828244/source dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:38 managed-node3 platform-python[21215]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:38 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun shutting down. Nov 19 14:37:39 managed-node3 systemd[1]: pmcd.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmcd.service has successfully entered the 'dead' state. Nov 19 14:37:39 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished shutting down. Nov 19 14:37:39 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun starting up. Nov 19 14:37:39 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:39 managed-node3 pmcd[21749]: Installing bpftrace PMDA ... 
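The shell fragment flattened across the two log entries above creates the metrics user in the pmcd SASL database; the sasldblistusers2 failure and the saslpasswd2 DB_NOTFOUND messages appear to be first-run noise from a passwd.db that did not yet contain the entry. Reconstructed for readability (the command text and paths come directly from the logged _raw_params; only the line breaks are added), together with the pmcd restart that the logged systemd call performs:

- name: Create the metrics SASL user in /etc/pcp/passwd.db
  ansible.builtin.shell: |
    set -eu
    if set -o | grep -q pipefail; then
      set -o pipefail  # pipefail not supported on debian, some ubuntu
    fi
    if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
      echo "Creating new metrics user in /etc/pcp/passwd.db"
      echo "metrics" | saslpasswd2 -a pmcd "metrics"
      chown root:pcp "/etc/pcp/passwd.db"
      chmod 640 "/etc/pcp/passwd.db"
    fi

- name: Restart and enable the Performance Metrics Collector Daemon
  ansible.builtin.systemd:
    name: pmcd
    state: restarted
    enabled: true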
Nov 19 14:37:40 managed-node3 platform-python[22074]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:40 managed-node3 platform-python[22275]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:41 managed-node3 platform-python[22507]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:42 managed-node3 platform-python[22776]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:42 managed-node3 kernel: Adding Red Hat flag eBPF/event. 
Nov 19 14:37:42 managed-node3 platform-python[22993]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:43 managed-node3 platform-python[23201]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:44 managed-node3 platform-python[23420]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:45 managed-node3 platform-python[23623]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:45 managed-node3 platform-python[23824]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:46 managed-node3 platform-python[23990]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045065.319697-10750-187538904605322/source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:46 managed-node3 platform-python[24193]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:47 managed-node3 platform-python[24359]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045066.260466-10750-258758944132216/source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:47 managed-node3 platform-python[24562]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops 
follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:48 managed-node3 platform-python[24728]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045067.2596946-10750-52363068821132/source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:48 managed-node3 platform-python[24931]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:48 managed-node3 platform-python[25034]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:49 managed-node3 platform-python[25235]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:49 managed-node3 platform-python[25401]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045069.1141744-10750-40755735757100/source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:50 managed-node3 platform-python[25604]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:50 managed-node3 platform-python[25768]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045070.1057148-10750-252457695410336/source dest=/etc/pcp/pmieconf/filesys/vfs_files owner=root group=root mode=0644 _original_basename=vfs_files follow=False checksum=cd5d85dfb8eebd7d9737d56e78bd969dafa3999c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:51 managed-node3 platform-python[25971]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:52 managed-node3 platform-python[26172]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows 
dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:52 managed-node3 platform-python[26373]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:53 managed-node3 platform-python[26574]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:53 managed-node3 platform-python[26775]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:54 managed-node3 platform-python[26976]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:54 managed-node3 platform-python[27177]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:55 managed-node3 platform-python[27378]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:55 managed-node3 systemd[1]: Stopping pmie farm service... 
-- Subject: Unit pmie_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun shutting down. Nov 19 14:37:55 managed-node3 systemd[1]: pmie_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_farm.service has successfully entered the 'dead' state. Nov 19 14:37:55 managed-node3 systemd[1]: Stopped pmie farm service. -- Subject: Unit pmie_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished shutting down. Nov 19 14:37:55 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun shutting down. Nov 19 14:37:55 managed-node3 systemd[1]: pmie.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie.service has successfully entered the 'dead' state. Nov 19 14:37:55 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished shutting down. Nov 19 14:37:55 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun starting up. Nov 19 14:37:56 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished starting up. -- -- The start-up result is done. Nov 19 14:37:56 managed-node3 systemd[1]: Starting pmie farm service... -- Subject: Unit pmie_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun starting up. Nov 19 14:37:56 managed-node3 systemd[1]: Started pmie farm service. -- Subject: Unit pmie_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished starting up. -- -- The start-up result is done. 
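The preceding entries stage pmie rules: directories are created under /etc/pcp/pmieconf and /var/lib/pcp/config/pmieconf, the rule files (tcplistenoverflows, tcpqfulldocookies, tcpqfulldrops, thermal_throttle, all_threads, vfs_files) are copied into /etc/pcp/pmieconf, a lineinfile call removes the global webhook_endpoint line from the generated pmie config, symlinks point the /var/lib/pcp/config/pmieconf tree back at /etc/pcp/pmieconf, and pmie is restarted and enabled (its farm service cycling with it). A condensed sketch of the linking and restart steps, with the rule list and link direction read off the log and the loop structure added for brevity:

- name: Link staged pmie rules into the pmie configuration tree
  ansible.builtin.file:
    src: "/etc/pcp/pmieconf/{{ item }}"
    dest: "/var/lib/pcp/config/pmieconf/{{ item }}"
    state: link
    force: true
  loop:
    - network/tcplistenoverflows
    - network/tcpqfulldocookies
    - network/tcpqfulldrops
    - power/thermal_throttle
    - zeroconf/all_threads
    - filesys/vfs_files

- name: Restart and enable the Performance Metrics Inference Engine
  ansible.builtin.systemd:
    name: pmie
    state: restarted
    enabled: true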
Nov 19 14:37:56 managed-node3 platform-python[28155]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:57 managed-node3 platform-python[28356]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:57 managed-node3 platform-python[28522]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045077.0118337-11329-267741418248728/source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:58 managed-node3 platform-python[28725]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:58 managed-node3 platform-python[28891]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045077.991943-11457-276445650908557/source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:59 managed-node3 platform-python[29094]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:59 managed-node3 systemd[1]: Stopping pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun shutting down. Nov 19 14:37:59 managed-node3 systemd[1]: pmlogger_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:37:59 managed-node3 systemd[1]: Stopped pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished shutting down. Nov 19 14:37:59 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun shutting down. Nov 19 14:37:59 managed-node3 systemd[1]: pmlogger.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger.service has successfully entered the 'dead' state. 
Nov 19 14:37:59 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished shutting down. Nov 19 14:37:59 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Nov 19 14:38:00 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:00 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. Nov 19 14:38:00 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:01 managed-node3 platform-python[29800]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmproxy follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:38:01 managed-node3 platform-python[29966]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045080.7700913-11623-258294591311632/source dest=/etc/sysconfig/pmproxy mode=0644 follow=False _original_basename=pmproxy.defaults.j2 checksum=fa1a173dfa5b1affbf6767115bdae2ce00e98ecc backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:02 managed-node3 platform-python[30169]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:38:02 managed-node3 systemd[1]: Reloading. Nov 19 14:38:02 managed-node3 systemd[1]: Starting Proxy for Performance Metrics Collector Daemon... -- Subject: Unit pmproxy.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has begun starting up. Nov 19 14:38:02 managed-node3 systemd[1]: Started Proxy for Performance Metrics Collector Daemon. -- Subject: Unit pmproxy.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has finished starting up. -- -- The start-up result is done. 
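The two entries above cover the archive-logging and proxy half of the setup: PCP_ARCHIVE_DIR is pinned in /etc/pcp.conf, /etc/sysconfig/pmlogger and /etc/sysconfig/pmlogger_timers are written from templates, pmlogger is restarted and enabled, and pmproxy gets /etc/sysconfig/pmproxy and is started and enabled. A hedged sketch of the key tasks (the lineinfile parameters and service states are taken from the logged calls; the combined loop is illustrative):

- name: Pin the PCP archive directory
  ansible.builtin.lineinfile:
    path: /etc/pcp.conf
    regexp: '^PCP_ARCHIVE_DIR='
    line: 'PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger'
    state: present

- name: Restart pmlogger and start pmproxy, enabling both
  ansible.builtin.systemd:
    name: "{{ item.name }}"
    state: "{{ item.state }}"
    enabled: true
  loop:
    - { name: pmlogger, state: restarted }
    - { name: pmproxy, state: started }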
Nov 19 14:38:03 managed-node3 platform-python[30444]: ansible-ansible.legacy.dnf Invoked with name=['grafana'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:38:36 managed-node3 kernel: SELinux: Converting 430 SID table entries... Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability open_perms=1 Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability always_check_network=0 Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 19 14:38:36 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 19 14:38:36 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:38:37 managed-node3 kernel: SELinux: Converting 434 SID table entries... Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability open_perms=1 Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability always_check_network=0 Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 19 14:38:37 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 19 14:38:37 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:28: Unknown lvalue 'ProtectClock' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:31: Unknown lvalue 'ProtectHostname' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:32: Unknown lvalue 'ProtectKernelLogs' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:35: Unknown lvalue 'ProtectProc' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:28: Unknown lvalue 'ProtectClock' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:31: Unknown lvalue 'ProtectHostname' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:32: Unknown lvalue 'ProtectKernelLogs' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:35: Unknown lvalue 'ProtectProc' in section 'Service' Nov 19 14:38:38 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-rb0790b9506e748c28e0b60f0636e07e2.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-rb0790b9506e748c28e0b60f0636e07e2.service has finished starting up. -- -- The start-up result is done. 
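The entry above installs Grafana via dnf, which triggers the SELinux policy reloads seen in the kernel messages. The repeated "Unknown lvalue 'ProtectClock' / 'ProtectHostname' / 'ProtectKernelLogs' / 'ProtectProc'" warnings indicate that grafana-server.service ships sandboxing directives newer than this host's systemd understands; systemd ignores unknown directives with a warning, so these lines are noise rather than failures. A minimal sketch of the install step that matches the logged invocation:

- name: Install Grafana
  ansible.builtin.dnf:
    name: grafana
    state: present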
Nov 19 14:38:38 managed-node3 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 19 14:38:38 managed-node3 systemd[1]: Reloading. Nov 19 14:38:38 managed-node3 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 19 14:38:38 managed-node3 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:38 managed-node3 systemd[1]: run-rb0790b9506e748c28e0b60f0636e07e2.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-rb0790b9506e748c28e0b60f0636e07e2.service has successfully entered the 'dead' state. Nov 19 14:38:39 managed-node3 platform-python[31074]: ansible-package_facts Invoked with manager=['auto'] strategy=first Nov 19 14:38:41 managed-node3 platform-python[31277]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/grafana.ini follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:38:42 managed-node3 platform-python[31443]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045121.2861042-12903-149903348498690/source dest=/etc/grafana/grafana.ini mode=0640 follow=False _original_basename=grafana_9.ini.j2 checksum=5cca05de69249344ed95e58493e10495e854415f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:42 managed-node3 platform-python[31646]: ansible-file Invoked with path=/etc/grafana/provisioning/datasources state=directory group=grafana owner=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:43 managed-node3 platform-python[31847]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/provisioning/datasources/grafana-pcp.yaml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:38:43 managed-node3 platform-python[32011]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045122.801251-12948-180726627816941/source dest=/etc/grafana/provisioning/datasources/grafana-pcp.yaml mode=0644 follow=False _original_basename=grafana-pcp-datasources.yaml.j2 checksum=34116c67b70835430a24bb4b09b96ea7eeeb3d9d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:44 managed-node3 platform-python[32214]: ansible-ansible.legacy.systemd Invoked with name=grafana-server state=started enabled=True daemon_reload=False 
daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:28: Unknown lvalue 'ProtectClock' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:31: Unknown lvalue 'ProtectHostname' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:32: Unknown lvalue 'ProtectKernelLogs' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:35: Unknown lvalue 'ProtectProc' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: Reloading. Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:28: Unknown lvalue 'ProtectClock' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:31: Unknown lvalue 'ProtectHostname' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:32: Unknown lvalue 'ProtectKernelLogs' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:35: Unknown lvalue 'ProtectProc' in section 'Service' Nov 19 14:38:44 managed-node3 systemd[1]: Starting Grafana instance... -- Subject: Unit grafana-server.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has begun starting up. Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698505441-05:00 level=info msg="Starting Grafana" version=9.2.10 commit=NA branch=main compiled=2024-11-19T14:38:44-05:00 Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698690194-05:00 level=info msg="Config loaded from" file=/usr/share/grafana/conf/defaults.ini Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698713992-05:00 level=info msg="Config loaded from" file=/etc/grafana/grafana.ini Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698730761-05:00 level=info msg="Config overridden from command line" arg="default.paths.data=/var/lib/grafana" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698746895-05:00 level=info msg="Config overridden from command line" arg="default.paths.logs=/var/log/grafana" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698815398-05:00 level=info msg="Config overridden from command line" arg="default.paths.plugins=/var/lib/grafana/plugins" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698832762-05:00 level=info msg="Config overridden from command line" arg="default.paths.provisioning=/etc/grafana/provisioning" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.69885051-05:00 level=info msg="Path Home" path=/usr/share/grafana Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698868924-05:00 level=info msg="Path Data" path=/var/lib/grafana Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698886882-05:00 level=info msg="Path Logs" path=/var/log/grafana Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.698905765-05:00 level=info msg="Path 
Plugins" path=/var/lib/grafana/plugins Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.69892299-05:00 level=info msg="Path Provisioning" path=/etc/grafana/provisioning Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=settings t=2024-11-19T14:38:44.69893947-05:00 level=info msg="App mode production" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=sqlstore t=2024-11-19T14:38:44.699040727-05:00 level=info msg="Connecting to DB" dbtype=sqlite3 Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.70027632-05:00 level=info msg="Starting DB migrations" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.700430579-05:00 level=info msg="Executing migration" id="create migration_log table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.703696519-05:00 level=info msg="Executing migration" id="create user table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.705658631-05:00 level=info msg="Executing migration" id="add unique index user.login" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.70764093-05:00 level=info msg="Executing migration" id="add unique index user.email" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.71006495-05:00 level=info msg="Executing migration" id="drop index UQE_user_login - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.712173701-05:00 level=info msg="Executing migration" id="drop index UQE_user_email - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.714276367-05:00 level=info msg="Executing migration" id="Rename table user to user_v1 - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.71682142-05:00 level=info msg="Executing migration" id="create user table v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.718828892-05:00 level=info msg="Executing migration" id="create index UQE_user_login - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.720689587-05:00 level=info msg="Executing migration" id="create index UQE_user_email - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.722628043-05:00 level=info msg="Executing migration" id="copy data_source v1 to v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.724546228-05:00 level=info msg="Executing migration" id="Drop old table user_v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.726460821-05:00 level=info msg="Executing migration" id="Add column help_flags1 to user table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.728350635-05:00 level=info msg="Executing migration" id="Update user table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.730319855-05:00 level=info msg="Executing migration" id="Add last_seen_at column to user" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.732396159-05:00 level=info msg="Executing migration" id="Add missing user data" Nov 19 14:38:44 managed-node3 grafana-server[32245]: 
logger=migrator t=2024-11-19T14:38:44.734309427-05:00 level=info msg="Executing migration" id="Add is_disabled column to user" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.736273344-05:00 level=info msg="Executing migration" id="Add index user.login/user.email" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.738174141-05:00 level=info msg="Executing migration" id="Add is_service_account column to user" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.740308156-05:00 level=info msg="Executing migration" id="Update is_service_account column to nullable" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.743195154-05:00 level=info msg="Executing migration" id="create temp user table v1-7" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.745251007-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_email - v1-7" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.747532795-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_org_id - v1-7" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.749408978-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_code - v1-7" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.751420997-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_status - v1-7" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.753378133-05:00 level=info msg="Executing migration" id="Update temp_user table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.755232881-05:00 level=info msg="Executing migration" id="drop index IDX_temp_user_email - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.75724208-05:00 level=info msg="Executing migration" id="drop index IDX_temp_user_org_id - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.759342112-05:00 level=info msg="Executing migration" id="drop index IDX_temp_user_code - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.761413191-05:00 level=info msg="Executing migration" id="drop index IDX_temp_user_status - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.763354754-05:00 level=info msg="Executing migration" id="Rename table temp_user to temp_user_tmp_qwerty - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.765536597-05:00 level=info msg="Executing migration" id="create temp_user v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.767456354-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_email - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.769610427-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_org_id - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.771572981-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_code - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:44.773521099-05:00 level=info msg="Executing migration" id="create index IDX_temp_user_status - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.775634934-05:00 level=info msg="Executing migration" id="copy temp_user v1 to v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.777931701-05:00 level=info msg="Executing migration" id="drop temp_user_tmp_qwerty" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.779934636-05:00 level=info msg="Executing migration" id="Set created for temp users that will otherwise prematurely expire" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.781820945-05:00 level=info msg="Executing migration" id="create star table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.783764697-05:00 level=info msg="Executing migration" id="add unique index star.user_id_dashboard_id" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.78564398-05:00 level=info msg="Executing migration" id="create org table v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.78765953-05:00 level=info msg="Executing migration" id="create index UQE_org_name - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.789569796-05:00 level=info msg="Executing migration" id="create org_user table v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.791541764-05:00 level=info msg="Executing migration" id="create index IDX_org_user_org_id - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.793524657-05:00 level=info msg="Executing migration" id="create index UQE_org_user_org_id_user_id - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.795480164-05:00 level=info msg="Executing migration" id="create index IDX_org_user_user_id - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.797426837-05:00 level=info msg="Executing migration" id="Update org table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.799338997-05:00 level=info msg="Executing migration" id="Update org_user table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.801682874-05:00 level=info msg="Executing migration" id="Migrate all Read Only Viewers to Viewers" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.803527347-05:00 level=info msg="Executing migration" id="create dashboard table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.80544284-05:00 level=info msg="Executing migration" id="add index dashboard.account_id" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.807483588-05:00 level=info msg="Executing migration" id="add unique index dashboard_account_id_slug" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.809406946-05:00 level=info msg="Executing migration" id="create dashboard_tag table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.811242865-05:00 level=info msg="Executing migration" id="add unique 
index dashboard_tag.dasboard_id_term" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.813284472-05:00 level=info msg="Executing migration" id="drop index UQE_dashboard_tag_dashboard_id_term - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.815177437-05:00 level=info msg="Executing migration" id="Rename table dashboard to dashboard_v1 - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.817575153-05:00 level=info msg="Executing migration" id="create dashboard v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.819702973-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_org_id - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.821649741-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_org_id_slug - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.823617776-05:00 level=info msg="Executing migration" id="copy dashboard v1 to v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.825497824-05:00 level=info msg="Executing migration" id="drop table dashboard_v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.827605832-05:00 level=info msg="Executing migration" id="alter dashboard.data to mediumtext v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.829557799-05:00 level=info msg="Executing migration" id="Add column updated_by in dashboard - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.832328279-05:00 level=info msg="Executing migration" id="Add column created_by in dashboard - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.835223684-05:00 level=info msg="Executing migration" id="Add column gnetId in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.838525873-05:00 level=info msg="Executing migration" id="Add index for gnetId in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.842122088-05:00 level=info msg="Executing migration" id="Add column plugin_id in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.8459662-05:00 level=info msg="Executing migration" id="Add index for plugin_id in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.849742894-05:00 level=info msg="Executing migration" id="Add index for dashboard_id in dashboard_tag" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.853724157-05:00 level=info msg="Executing migration" id="Update dashboard table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.857564561-05:00 level=info msg="Executing migration" id="Update dashboard_tag table charset" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.861812341-05:00 level=info msg="Executing migration" id="Add column folder_id in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.865671015-05:00 level=info msg="Executing migration" id="Add column isFolder in dashboard" Nov 19 
14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.870143483-05:00 level=info msg="Executing migration" id="Add column has_acl in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.874485749-05:00 level=info msg="Executing migration" id="Add column uid in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.878500125-05:00 level=info msg="Executing migration" id="Update uid column values in dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.882380371-05:00 level=info msg="Executing migration" id="Add unique index dashboard_org_id_uid" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.886565367-05:00 level=info msg="Executing migration" id="Remove unique index org_id_slug" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.891071623-05:00 level=info msg="Executing migration" id="Update dashboard title length" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.894842744-05:00 level=info msg="Executing migration" id="Add unique index for dashboard_org_id_title_folder_id" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.898833949-05:00 level=info msg="Executing migration" id="create dashboard_provisioning" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.902762063-05:00 level=info msg="Executing migration" id="Rename table dashboard_provisioning to dashboard_provisioning_tmp_qwerty - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.907665019-05:00 level=info msg="Executing migration" id="create dashboard_provisioning v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.911868693-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_provisioning_dashboard_id - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.916326123-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_provisioning_dashboard_id_name - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.920330137-05:00 level=info msg="Executing migration" id="copy dashboard_provisioning v1 to v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.924158842-05:00 level=info msg="Executing migration" id="drop dashboard_provisioning_tmp_qwerty" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.928268323-05:00 level=info msg="Executing migration" id="Add check_sum column" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.932484236-05:00 level=info msg="Executing migration" id="Add index for dashboard_title" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.936537928-05:00 level=info msg="Executing migration" id="delete tags for deleted dashboards" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.940312474-05:00 level=info msg="Executing migration" id="delete stars for deleted dashboards" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.944595646-05:00 level=info msg="Executing migration" id="Add index for 
dashboard_is_folder" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.948589756-05:00 level=info msg="Executing migration" id="Add isPublic for dashboard" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.952936241-05:00 level=info msg="Executing migration" id="create data_source table" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.958458112-05:00 level=info msg="Executing migration" id="add index data_source.account_id" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.962664185-05:00 level=info msg="Executing migration" id="add unique index data_source.account_id_name" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.96670015-05:00 level=info msg="Executing migration" id="drop index IDX_data_source_account_id - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.970791823-05:00 level=info msg="Executing migration" id="drop index UQE_data_source_account_id_name - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.974992747-05:00 level=info msg="Executing migration" id="Rename table data_source to data_source_v1 - v1" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.979892023-05:00 level=info msg="Executing migration" id="create data_source table v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.983901186-05:00 level=info msg="Executing migration" id="create index IDX_data_source_org_id - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.988502403-05:00 level=info msg="Executing migration" id="create index UQE_data_source_org_id_name - v2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.992586224-05:00 level=info msg="Executing migration" id="Drop old table data_source_v1 #2" Nov 19 14:38:44 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:44.996671335-05:00 level=info msg="Executing migration" id="Add column with_credentials" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.000848785-05:00 level=info msg="Executing migration" id="Add secure json data column" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.00526148-05:00 level=info msg="Executing migration" id="Update data_source table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.009238702-05:00 level=info msg="Executing migration" id="Update initial version to 1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.013365942-05:00 level=info msg="Executing migration" id="Add read_only data column" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.017485398-05:00 level=info msg="Executing migration" id="Migrate logging ds to loki ds" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.021362831-05:00 level=info msg="Executing migration" id="Update json_data with nulls" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.025175727-05:00 level=info msg="Executing migration" id="Add uid column" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:45.029416486-05:00 level=info msg="Executing migration" id="Update uid value" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.033708909-05:00 level=info msg="Executing migration" id="Add unique index datasource_org_id_uid" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.037669765-05:00 level=info msg="Executing migration" id="add unique index datasource_org_id_is_default" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.041863923-05:00 level=info msg="Executing migration" id="create api_key table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.045883511-05:00 level=info msg="Executing migration" id="add index api_key.account_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.050102218-05:00 level=info msg="Executing migration" id="add index api_key.key" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.054164439-05:00 level=info msg="Executing migration" id="add index api_key.account_id_name" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.058374337-05:00 level=info msg="Executing migration" id="drop index IDX_api_key_account_id - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.062723415-05:00 level=info msg="Executing migration" id="drop index UQE_api_key_key - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.066970044-05:00 level=info msg="Executing migration" id="drop index UQE_api_key_account_id_name - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.071019402-05:00 level=info msg="Executing migration" id="Rename table api_key to api_key_v1 - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.075920436-05:00 level=info msg="Executing migration" id="create api_key table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.080219608-05:00 level=info msg="Executing migration" id="create index IDX_api_key_org_id - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.084831565-05:00 level=info msg="Executing migration" id="create index UQE_api_key_key - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.088535714-05:00 level=info msg="Executing migration" id="create index UQE_api_key_org_id_name - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.092753902-05:00 level=info msg="Executing migration" id="copy api_key v1 to v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.096714755-05:00 level=info msg="Executing migration" id="Drop old table api_key_v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.101001569-05:00 level=info msg="Executing migration" id="Update api_key table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.1046006-05:00 level=info msg="Executing migration" id="Add expires to api_key table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.108924537-05:00 level=info msg="Executing migration" id="Add service account foreign key" Nov 19 
14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.113092441-05:00 level=info msg="Executing migration" id="set service account foreign key to nil if 0" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.116918938-05:00 level=info msg="Executing migration" id="Add last_used_at to api_key table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.121056237-05:00 level=info msg="Executing migration" id="Add is_revoked column to api_key table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.12541339-05:00 level=info msg="Executing migration" id="create dashboard_snapshot table v4" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.13017521-05:00 level=info msg="Executing migration" id="drop table dashboard_snapshot_v4 #1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.134157165-05:00 level=info msg="Executing migration" id="create dashboard_snapshot table v5 #2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.138346352-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_snapshot_key - v5" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.142751203-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_snapshot_delete_key - v5" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.147539775-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_snapshot_user_id - v5" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.151694851-05:00 level=info msg="Executing migration" id="alter dashboard_snapshot to mediumtext v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.155472009-05:00 level=info msg="Executing migration" id="Update dashboard_snapshot table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.159730921-05:00 level=info msg="Executing migration" id="Add column external_delete_url to dashboard_snapshots table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.163740284-05:00 level=info msg="Executing migration" id="Add encrypted dashboard json column" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.168389339-05:00 level=info msg="Executing migration" id="Change dashboard_encrypted column to MEDIUMBLOB" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.172616024-05:00 level=info msg="Executing migration" id="create quota table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.176484582-05:00 level=info msg="Executing migration" id="create index UQE_quota_org_id_user_id_target - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.180425054-05:00 level=info msg="Executing migration" id="Update quota table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.18436087-05:00 level=info msg="Executing migration" id="create plugin_setting table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.188410003-05:00 level=info msg="Executing migration" id="create 
index UQE_plugin_setting_org_id_plugin_id - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.192623029-05:00 level=info msg="Executing migration" id="Add column plugin_version to plugin_settings" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.197448677-05:00 level=info msg="Executing migration" id="Update plugin_setting table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.201049399-05:00 level=info msg="Executing migration" id="create session table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.205124895-05:00 level=info msg="Executing migration" id="Drop old table playlist table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.210931491-05:00 level=info msg="Executing migration" id="Drop old table playlist_item table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.214803927-05:00 level=info msg="Executing migration" id="create playlist table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.218893992-05:00 level=info msg="Executing migration" id="create playlist item table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.223026072-05:00 level=info msg="Executing migration" id="Update playlist table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.226816543-05:00 level=info msg="Executing migration" id="Update playlist_item table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.230838403-05:00 level=info msg="Executing migration" id="drop preferences table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.235097276-05:00 level=info msg="Executing migration" id="drop preferences table v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.239015284-05:00 level=info msg="Executing migration" id="create preferences table v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.243003218-05:00 level=info msg="Executing migration" id="Update preferences table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.246864816-05:00 level=info msg="Executing migration" id="Add column team_id in preferences" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.251395673-05:00 level=info msg="Executing migration" id="Update team_id column values in preferences" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.255214779-05:00 level=info msg="Executing migration" id="Add column week_start in preferences" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.260151721-05:00 level=info msg="Executing migration" id="Add column preferences.json_data" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.26437787-05:00 level=info msg="Executing migration" id="alter preferences.json_data to mediumtext v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.268516123-05:00 level=info msg="Executing migration" id="create alert table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: 
logger=migrator t=2024-11-19T14:38:45.272971795-05:00 level=info msg="Executing migration" id="add index alert org_id & id " Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.277280442-05:00 level=info msg="Executing migration" id="add index alert state" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.281274808-05:00 level=info msg="Executing migration" id="add index alert dashboard_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.285490847-05:00 level=info msg="Executing migration" id="Create alert_rule_tag table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.289435627-05:00 level=info msg="Executing migration" id="Add unique index alert_rule_tag.alert_id_tag_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.293593254-05:00 level=info msg="Executing migration" id="drop index UQE_alert_rule_tag_alert_id_tag_id - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.297913012-05:00 level=info msg="Executing migration" id="Rename table alert_rule_tag to alert_rule_tag_v1 - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.303019176-05:00 level=info msg="Executing migration" id="Create alert_rule_tag table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.307095679-05:00 level=info msg="Executing migration" id="create index UQE_alert_rule_tag_alert_id_tag_id - Add unique index alert_rule_tag.alert_id_tag_id V2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.31133146-05:00 level=info msg="Executing migration" id="copy alert_rule_tag v1 to v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.315104074-05:00 level=info msg="Executing migration" id="drop table alert_rule_tag_v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.319260052-05:00 level=info msg="Executing migration" id="create alert_notification table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.323643088-05:00 level=info msg="Executing migration" id="Add column is_default" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.328048704-05:00 level=info msg="Executing migration" id="Add column frequency" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.332412242-05:00 level=info msg="Executing migration" id="Add column send_reminder" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.336864213-05:00 level=info msg="Executing migration" id="Add column disable_resolve_message" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.340974052-05:00 level=info msg="Executing migration" id="add index alert_notification org_id & name" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.345267486-05:00 level=info msg="Executing migration" id="Update alert table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.349255587-05:00 level=info msg="Executing migration" id="Update alert_notification table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:45.353402909-05:00 level=info msg="Executing migration" id="create notification_journal table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.357636034-05:00 level=info msg="Executing migration" id="add index notification_journal org_id & alert_id & notifier_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.361722905-05:00 level=info msg="Executing migration" id="drop alert_notification_journal" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.366003744-05:00 level=info msg="Executing migration" id="create alert_notification_state table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.37043119-05:00 level=info msg="Executing migration" id="add index alert_notification_state org_id & alert_id & notifier_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.374399658-05:00 level=info msg="Executing migration" id="Add for to alert table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.379560789-05:00 level=info msg="Executing migration" id="Add column uid in alert_notification" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.383720704-05:00 level=info msg="Executing migration" id="Update uid column values in alert_notification" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.387597624-05:00 level=info msg="Executing migration" id="Add unique index alert_notification_org_id_uid" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.391684511-05:00 level=info msg="Executing migration" id="Remove unique index org_id_name" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.396380394-05:00 level=info msg="Executing migration" id="Add column secure_settings in alert_notification" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.400665303-05:00 level=info msg="Executing migration" id="alter alert.settings to mediumtext" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.404574145-05:00 level=info msg="Executing migration" id="Add non-unique index alert_notification_state_alert_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.408655924-05:00 level=info msg="Executing migration" id="Add non-unique index alert_rule_tag_alert_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.412758954-05:00 level=info msg="Executing migration" id="Drop old annotation table v4" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.416747673-05:00 level=info msg="Executing migration" id="create annotation table v5" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.420920808-05:00 level=info msg="Executing migration" id="add index annotation 0 v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.424934767-05:00 level=info msg="Executing migration" id="add index annotation 1 v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.428920116-05:00 level=info msg="Executing migration" id="add index annotation 2 v3" Nov 19 14:38:45 managed-node3 
grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.432871371-05:00 level=info msg="Executing migration" id="add index annotation 3 v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.437122215-05:00 level=info msg="Executing migration" id="add index annotation 4 v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.442368511-05:00 level=info msg="Executing migration" id="Update annotation table charset" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.446160074-05:00 level=info msg="Executing migration" id="Add column region_id to annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.450528144-05:00 level=info msg="Executing migration" id="Drop category_id index" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.454929354-05:00 level=info msg="Executing migration" id="Add column tags to annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.459342669-05:00 level=info msg="Executing migration" id="Create annotation_tag table v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.464350873-05:00 level=info msg="Executing migration" id="Add unique index annotation_tag.annotation_id_tag_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.46834267-05:00 level=info msg="Executing migration" id="drop index UQE_annotation_tag_annotation_id_tag_id - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.472329437-05:00 level=info msg="Executing migration" id="Rename table annotation_tag to annotation_tag_v2 - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.477806956-05:00 level=info msg="Executing migration" id="Create annotation_tag table v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.481777206-05:00 level=info msg="Executing migration" id="create index UQE_annotation_tag_annotation_id_tag_id - Add unique index annotation_tag.annotation_id_tag_id V3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.486134563-05:00 level=info msg="Executing migration" id="copy annotation_tag v2 to v3" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.490072282-05:00 level=info msg="Executing migration" id="drop table annotation_tag_v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.494038819-05:00 level=info msg="Executing migration" id="Update alert annotations and set TEXT to empty" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.498068599-05:00 level=info msg="Executing migration" id="Add created time to annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.502321392-05:00 level=info msg="Executing migration" id="Add updated time to annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.506561288-05:00 level=info msg="Executing migration" id="Add index for created in annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.510486111-05:00 level=info msg="Executing migration" id="Add index for 
updated in annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.514557787-05:00 level=info msg="Executing migration" id="Convert existing annotations from seconds to milliseconds" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.518414364-05:00 level=info msg="Executing migration" id="Add epoch_end column" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.523014264-05:00 level=info msg="Executing migration" id="Add index for epoch_end" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.52720814-05:00 level=info msg="Executing migration" id="Make epoch_end the same as epoch" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.530994346-05:00 level=info msg="Executing migration" id="Move region to single row" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.53499002-05:00 level=info msg="Executing migration" id="Remove index org_id_epoch from annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.538898084-05:00 level=info msg="Executing migration" id="Remove index org_id_dashboard_id_panel_id_epoch from annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.542853839-05:00 level=info msg="Executing migration" id="Add index for org_id_dashboard_id_epoch_end_epoch on annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.546999354-05:00 level=info msg="Executing migration" id="Add index for org_id_epoch_end_epoch on annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.551160361-05:00 level=info msg="Executing migration" id="Remove index org_id_epoch_epoch_end from annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.555403718-05:00 level=info msg="Executing migration" id="Add index for alert_id on annotation table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.559586799-05:00 level=info msg="Executing migration" id="Increase tags column to length 4096" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.563385955-05:00 level=info msg="Executing migration" id="create test_data table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.567275935-05:00 level=info msg="Executing migration" id="create dashboard_version table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.571115977-05:00 level=info msg="Executing migration" id="add index dashboard_version.dashboard_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.575262292-05:00 level=info msg="Executing migration" id="add unique index dashboard_version.dashboard_id and dashboard_version.version" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.579465027-05:00 level=info msg="Executing migration" id="Set dashboard version to 1 where 0" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.583431106-05:00 level=info msg="Executing migration" id="save existing dashboard data in dashboard_version table v1" Nov 19 14:38:45 managed-node3 
grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.587250991-05:00 level=info msg="Executing migration" id="alter dashboard_version.data to mediumtext v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.59121112-05:00 level=info msg="Executing migration" id="create team table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.595578359-05:00 level=info msg="Executing migration" id="add index team.org_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.599852279-05:00 level=info msg="Executing migration" id="add unique index team_org_id_name" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.603876488-05:00 level=info msg="Executing migration" id="create team member table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.60771869-05:00 level=info msg="Executing migration" id="add index team_member.org_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.61183705-05:00 level=info msg="Executing migration" id="add unique index team_member_org_id_team_id_user_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.615946571-05:00 level=info msg="Executing migration" id="add index team_member.team_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.621559791-05:00 level=info msg="Executing migration" id="Add column email to team table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.62611102-05:00 level=info msg="Executing migration" id="Add column external to team_member table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.630506134-05:00 level=info msg="Executing migration" id="Add column permission to team_member table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.635125619-05:00 level=info msg="Executing migration" id="create dashboard acl table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.639530968-05:00 level=info msg="Executing migration" id="add index dashboard_acl_dashboard_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.643820168-05:00 level=info msg="Executing migration" id="add unique index dashboard_acl_dashboard_id_user_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.648395397-05:00 level=info msg="Executing migration" id="add unique index dashboard_acl_dashboard_id_team_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.65240164-05:00 level=info msg="Executing migration" id="add index dashboard_acl_user_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.656456056-05:00 level=info msg="Executing migration" id="add index dashboard_acl_team_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.660430737-05:00 level=info msg="Executing migration" id="add index dashboard_acl_org_id_role" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.664550411-05:00 level=info msg="Executing migration" id="add index dashboard_permission" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:45.66889994-05:00 level=info msg="Executing migration" id="save default acl rules in dashboard_acl table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.672961635-05:00 level=info msg="Executing migration" id="delete acl rules for deleted dashboards and folders" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.676900734-05:00 level=info msg="Executing migration" id="create tag table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.680953475-05:00 level=info msg="Executing migration" id="add index tag.key_value" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.685115922-05:00 level=info msg="Executing migration" id="create login attempt table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.689272074-05:00 level=info msg="Executing migration" id="add index login_attempt.username" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.693454785-05:00 level=info msg="Executing migration" id="drop index IDX_login_attempt_username - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.697529871-05:00 level=info msg="Executing migration" id="Rename table login_attempt to login_attempt_tmp_qwerty - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.703640982-05:00 level=info msg="Executing migration" id="create login_attempt v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.707814273-05:00 level=info msg="Executing migration" id="create index IDX_login_attempt_username - v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.711853099-05:00 level=info msg="Executing migration" id="copy login_attempt v1 to v2" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.716277226-05:00 level=info msg="Executing migration" id="drop login_attempt_tmp_qwerty" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.720728828-05:00 level=info msg="Executing migration" id="create user auth table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.724485901-05:00 level=info msg="Executing migration" id="create index IDX_user_auth_auth_module_auth_id - v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.728848602-05:00 level=info msg="Executing migration" id="alter user_auth.auth_id to length 190" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.732447351-05:00 level=info msg="Executing migration" id="Add OAuth access token to user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.73674306-05:00 level=info msg="Executing migration" id="Add OAuth refresh token to user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.741480556-05:00 level=info msg="Executing migration" id="Add OAuth token type to user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.74592628-05:00 level=info msg="Executing migration" id="Add OAuth expiry to user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.750553238-05:00 
level=info msg="Executing migration" id="Add index to user_id column in user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.75481814-05:00 level=info msg="Executing migration" id="Add OAuth ID token to user_auth" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.759295626-05:00 level=info msg="Executing migration" id="create server_lock table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.763636826-05:00 level=info msg="Executing migration" id="add index server_lock.operation_uid" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.767421594-05:00 level=info msg="Executing migration" id="create user auth token table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.771469341-05:00 level=info msg="Executing migration" id="add unique index user_auth_token.auth_token" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.775428571-05:00 level=info msg="Executing migration" id="add unique index user_auth_token.prev_auth_token" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.779655963-05:00 level=info msg="Executing migration" id="add index user_auth_token.user_id" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.783668267-05:00 level=info msg="Executing migration" id="Add revoked_at to the user auth token" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.78794888-05:00 level=info msg="Executing migration" id="create cache_data table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.792583297-05:00 level=info msg="Executing migration" id="add unique index cache_data.cache_key" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.796737838-05:00 level=info msg="Executing migration" id="create short_url table v1" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.800802986-05:00 level=info msg="Executing migration" id="add index short_url.org_id-uid" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.804846499-05:00 level=info msg="Executing migration" id="alter table short_url alter column created_by type to bigint" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.808723677-05:00 level=info msg="Executing migration" id="delete alert_definition table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.812720048-05:00 level=info msg="Executing migration" id="recreate alert_definition table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.816809003-05:00 level=info msg="Executing migration" id="add index in alert_definition on org_id and title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.82100832-05:00 level=info msg="Executing migration" id="add index in alert_definition on org_id and uid columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.825140238-05:00 level=info msg="Executing migration" id="alter alert_definition table data column to mediumtext in mysql" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:45.829137292-05:00 level=info msg="Executing migration" id="drop index in alert_definition on org_id and title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.833225423-05:00 level=info msg="Executing migration" id="drop index in alert_definition on org_id and uid columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.837412549-05:00 level=info msg="Executing migration" id="add unique index in alert_definition on org_id and title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.841551508-05:00 level=info msg="Executing migration" id="add unique index in alert_definition on org_id and uid columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.8459742-05:00 level=info msg="Executing migration" id="Add column paused in alert_definition" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.850988738-05:00 level=info msg="Executing migration" id="drop alert_definition table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.855244679-05:00 level=info msg="Executing migration" id="delete alert_definition_version table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.85911635-05:00 level=info msg="Executing migration" id="recreate alert_definition_version table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.863256211-05:00 level=info msg="Executing migration" id="add index in alert_definition_version table on alert_definition_id and version columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.867340953-05:00 level=info msg="Executing migration" id="add index in alert_definition_version table on alert_definition_uid and version columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.871724965-05:00 level=info msg="Executing migration" id="alter alert_definition_version table data column to mediumtext in mysql" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.875461272-05:00 level=info msg="Executing migration" id="drop alert_definition_version table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.87998548-05:00 level=info msg="Executing migration" id="create alert_instance table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.88402828-05:00 level=info msg="Executing migration" id="add index in alert_instance table on def_org_id, def_uid and current_state columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.888276061-05:00 level=info msg="Executing migration" id="add index in alert_instance table on def_org_id, current_state columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.892518106-05:00 level=info msg="Executing migration" id="add column current_state_end to alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.897210265-05:00 level=info msg="Executing migration" id="remove index def_org_id, def_uid, current_state on alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.901140168-05:00 level=info 
msg="Executing migration" id="remove index def_org_id, current_state on alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.90535686-05:00 level=info msg="Executing migration" id="rename def_org_id to rule_org_id in alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.912894579-05:00 level=info msg="Executing migration" id="rename def_uid to rule_uid in alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.920870261-05:00 level=info msg="Executing migration" id="add index rule_org_id, rule_uid, current_state on alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.924974808-05:00 level=info msg="Executing migration" id="add index rule_org_id, current_state on alert_instance" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.929055691-05:00 level=info msg="Executing migration" id="add current_reason column related to current_state" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.933565198-05:00 level=info msg="Executing migration" id="create alert_rule table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.937533584-05:00 level=info msg="Executing migration" id="add index in alert_rule on org_id and title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.94192896-05:00 level=info msg="Executing migration" id="add index in alert_rule on org_id and uid columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.945904622-05:00 level=info msg="Executing migration" id="add index in alert_rule on org_id, namespace_uid, group_uid columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.949899867-05:00 level=info msg="Executing migration" id="alter alert_rule table data column to mediumtext in mysql" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.953713101-05:00 level=info msg="Executing migration" id="add column for to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.958647103-05:00 level=info msg="Executing migration" id="add column annotations to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.963336283-05:00 level=info msg="Executing migration" id="add column labels to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.968812446-05:00 level=info msg="Executing migration" id="remove unique index from alert_rule on org_id, title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.972678893-05:00 level=info msg="Executing migration" id="add index in alert_rule on org_id, namespase_uid and title columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.976884759-05:00 level=info msg="Executing migration" id="add dashboard_uid column to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.981289364-05:00 level=info msg="Executing migration" id="add panel_id column to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.98573104-05:00 level=info 
msg="Executing migration" id="add index in alert_rule on org_id, dashboard_uid and panel_id columns" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.989822643-05:00 level=info msg="Executing migration" id="add rule_group_idx column to alert_rule" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.994270704-05:00 level=info msg="Executing migration" id="create alert_rule_version table" Nov 19 14:38:45 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:45.998399726-05:00 level=info msg="Executing migration" id="add index in alert_rule_version table on rule_org_id, rule_uid and version columns" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.002519604-05:00 level=info msg="Executing migration" id="add index in alert_rule_version table on rule_org_id, rule_namespace_uid and rule_group columns" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.006630815-05:00 level=info msg="Executing migration" id="alter alert_rule_version table data column to mediumtext in mysql" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.01061486-05:00 level=info msg="Executing migration" id="add column for to alert_rule_version" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.015319922-05:00 level=info msg="Executing migration" id="add column annotations to alert_rule_version" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.019716594-05:00 level=info msg="Executing migration" id="add column labels to alert_rule_version" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.024265609-05:00 level=info msg="Executing migration" id="add rule_group_idx column to alert_rule_version" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.02865911-05:00 level=info msg="Executing migration" id=create_alert_configuration_table Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.032722027-05:00 level=info msg="Executing migration" id="Add column default in alert_configuration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.037116136-05:00 level=info msg="Executing migration" id="alert alert_configuration alertmanager_configuration column from TEXT to MEDIUMTEXT if mysql" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.04098008-05:00 level=info msg="Executing migration" id="add column org_id in alert_configuration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.045434765-05:00 level=info msg="Executing migration" id="add index in alert_configuration table on org_id column" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.049385098-05:00 level=info msg="Executing migration" id="add configuration_hash column to alert_configuration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.054104976-05:00 level=info msg="Executing migration" id=create_ngalert_configuration_table Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.058226193-05:00 level=info msg="Executing migration" id="add index in ngalert_configuration on org_id column" Nov 19 14:38:46 managed-node3 
grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.062239581-05:00 level=info msg="Executing migration" id="add column send_alerts_to in ngalert_configuration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.066750824-05:00 level=info msg="Executing migration" id="create provenance_type table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.070870574-05:00 level=info msg="Executing migration" id="add index to uniquify (record_key, record_type, org_id) columns" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.074922062-05:00 level=info msg="Executing migration" id="create alert_image table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.079164907-05:00 level=info msg="Executing migration" id="add unique index on token to alert_image table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.083312678-05:00 level=info msg="Executing migration" id="support longer URLs in alert_image table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.087118732-05:00 level=info msg="Executing migration" id="clear migration entry \"remove unified alerting data\"" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.087345932-05:00 level=info msg="Executing migration" id="move dashboard alerts to unified alerting" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.087536104-05:00 level=info msg="alerts found to migrate" alerts=0 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.091589649-05:00 level=info msg="Executing migration" id="create library_element table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.095658627-05:00 level=info msg="Executing migration" id="add index library_element org_id-folder_id-name-kind" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.099816463-05:00 level=info msg="Executing migration" id="create library_element_connection table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.104020532-05:00 level=info msg="Executing migration" id="add index library_element_connection element_id-kind-connection_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.108403425-05:00 level=info msg="Executing migration" id="add unique index library_element org_id_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.113186395-05:00 level=info msg="Executing migration" id="increase max description length to 2048" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.117022839-05:00 level=info msg="Executing migration" id="clone move dashboard alerts to unified alerting" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.121344963-05:00 level=info msg="Executing migration" id="create data_keys table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.125452535-05:00 level=info msg="Executing migration" id="create secrets table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.129472524-05:00 level=info msg="Executing migration" id="rename data_keys name 
column to id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.138546534-05:00 level=info msg="Executing migration" id="add name column into data_keys" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.143404461-05:00 level=info msg="Executing migration" id="copy data_keys id column values into name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.147485226-05:00 level=info msg="Executing migration" id="rename data_keys name column to label" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.15577032-05:00 level=info msg="Executing migration" id="rename data_keys id column back to name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.164724131-05:00 level=info msg="Executing migration" id="create kv_store table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.168733924-05:00 level=info msg="Executing migration" id="add index kv_store.org_id-namespace-key" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.173169248-05:00 level=info msg="Executing migration" id="update dashboard_uid and panel_id from existing annotations" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.177145427-05:00 level=info msg="Executing migration" id="create permission table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.181420419-05:00 level=info msg="Executing migration" id="add unique index permission.role_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.185671294-05:00 level=info msg="Executing migration" id="add unique index role_id_action_scope" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.189954975-05:00 level=info msg="Executing migration" id="create role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.193844673-05:00 level=info msg="Executing migration" id="add column display_name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.198995154-05:00 level=info msg="Executing migration" id="add column group_name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.203801565-05:00 level=info msg="Executing migration" id="add index role.org_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.20812707-05:00 level=info msg="Executing migration" id="add unique index role_org_id_name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.212666026-05:00 level=info msg="Executing migration" id="add index role_org_id_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.216472255-05:00 level=info msg="Executing migration" id="create team role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.221256797-05:00 level=info msg="Executing migration" id="add index team_role.org_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.225323713-05:00 level=info msg="Executing migration" id="add unique index team_role_org_id_team_id_role_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:46.229216497-05:00 level=info msg="Executing migration" id="add index team_role.team_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.233384323-05:00 level=info msg="Executing migration" id="create user role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.237426286-05:00 level=info msg="Executing migration" id="add index user_role.org_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.241430981-05:00 level=info msg="Executing migration" id="add unique index user_role_org_id_user_id_role_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.245513994-05:00 level=info msg="Executing migration" id="add index user_role.user_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.250192027-05:00 level=info msg="Executing migration" id="create builtin role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.25413193-05:00 level=info msg="Executing migration" id="add index builtin_role.role_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.258659349-05:00 level=info msg="Executing migration" id="add index builtin_role.name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.262724162-05:00 level=info msg="Executing migration" id="Add column org_id to builtin_role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.267471618-05:00 level=info msg="Executing migration" id="add index builtin_role.org_id" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.271656524-05:00 level=info msg="Executing migration" id="add unique index builtin_role_org_id_role_id_role" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.275651851-05:00 level=info msg="Executing migration" id="Remove unique index role_org_id_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.279705048-05:00 level=info msg="Executing migration" id="add unique index role.uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.283827871-05:00 level=info msg="Executing migration" id="create seed assignment table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.287965559-05:00 level=info msg="Executing migration" id="add unique index builtin_role_role_name" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.292098978-05:00 level=info msg="Executing migration" id="add column hidden to role table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.29696647-05:00 level=info msg="Executing migration" id="create query_history table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.300977789-05:00 level=info msg="Executing migration" id="add index query_history.org_id-created_by-datasource_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.305081184-05:00 level=info msg="Executing migration" id="alter table query_history alter column created_by type to bigint" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.308972944-05:00 level=info 
msg="Executing migration" id="teams permissions migration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.313123503-05:00 level=info msg="Executing migration" id="dashboard permissions" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.31741141-05:00 level=info msg="Executing migration" id="dashboard permissions uid scopes" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.321405665-05:00 level=info msg="Executing migration" id="drop managed folder create actions" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.325251542-05:00 level=info msg="Executing migration" id="alerting notification permissions" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.32934-05:00 level=info msg="Executing migration" id="create query_history_star table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.333382055-05:00 level=info msg="Executing migration" id="add index query_history.user_id-query_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.337725712-05:00 level=info msg="Executing migration" id="add column org_id in query_history_star" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.342528708-05:00 level=info msg="Executing migration" id="alter table query_history_star_mig column user_id type to bigint" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.346462336-05:00 level=info msg="Executing migration" id="create correlation table v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.350517156-05:00 level=info msg="Executing migration" id="add index correlations.uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.354761662-05:00 level=info msg="Executing migration" id="add index correlations.source_uid" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.358867483-05:00 level=info msg="Executing migration" id="create entity_events table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.36275711-05:00 level=info msg="Executing migration" id="create dashboard public config v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.367141229-05:00 level=info msg="Executing migration" id="drop index UQE_dashboard_public_config_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.367274401-05:00 level=warn msg="Skipping migration: Already executed, but not recorded in migration log" id="drop index UQE_dashboard_public_config_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.371128527-05:00 level=info msg="Executing migration" id="drop index IDX_dashboard_public_config_org_id_dashboard_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.371255828-05:00 level=warn msg="Skipping migration: Already executed, but not recorded in migration log" id="drop index IDX_dashboard_public_config_org_id_dashboard_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.375186959-05:00 level=info msg="Executing migration" id="Drop old dashboard public config 
table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.379367649-05:00 level=info msg="Executing migration" id="recreate dashboard public config v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.383381464-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_public_config_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.387577053-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_public_config_org_id_dashboard_uid - v1" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.391910546-05:00 level=info msg="Executing migration" id="drop index UQE_dashboard_public_config_uid - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.395958467-05:00 level=info msg="Executing migration" id="drop index IDX_dashboard_public_config_org_id_dashboard_uid - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.400064518-05:00 level=info msg="Executing migration" id="Drop public config table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.404235002-05:00 level=info msg="Executing migration" id="Recreate dashboard public config v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.408964913-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_public_config_uid - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.413063556-05:00 level=info msg="Executing migration" id="create index IDX_dashboard_public_config_org_id_dashboard_uid - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.417162536-05:00 level=info msg="Executing migration" id="create index UQE_dashboard_public_config_access_token - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.42133339-05:00 level=info msg="Executing migration" id="Rename table dashboard_public_config to dashboard_public - v2" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.4286082-05:00 level=info msg="Executing migration" id="create default alerting folders" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.432543134-05:00 level=info msg="Executing migration" id="create file table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.436633945-05:00 level=info msg="Executing migration" id="file table idx: path natural pk" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.440605121-05:00 level=info msg="Executing migration" id="file table idx: parent_folder_path_hash fast folder retrieval" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.444881562-05:00 level=info msg="Executing migration" id="create file_meta table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.44909573-05:00 level=info msg="Executing migration" id="file table idx: path key" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.453337545-05:00 level=info msg="Executing migration" id="set path collation in file table" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator 
t=2024-11-19T14:38:46.457153338-05:00 level=info msg="Executing migration" id="managed permissions migration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.461017297-05:00 level=info msg="Executing migration" id="managed folder permissions alert actions migration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.465071011-05:00 level=info msg="Executing migration" id="RBAC action name migrator" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.46905959-05:00 level=info msg="Executing migration" id="Add UID column to playlist" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.474679809-05:00 level=info msg="Executing migration" id="Update uid column values in playlist" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.478507849-05:00 level=info msg="Executing migration" id="Add index for uid in playlist" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.482557953-05:00 level=info msg="Executing migration" id="update group index for alert rules" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.48681849-05:00 level=info msg="Executing migration" id="managed folder permissions alert actions repeated migration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.490747855-05:00 level=info msg="Executing migration" id="admin only folder/dashboard permission" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.494602745-05:00 level=info msg="Executing migration" id="managed folder permissions alert actions repeated fixed migration" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=migrator t=2024-11-19T14:38:46.498388646-05:00 level=info msg="migrations completed" performed=456 skipped=0 duration=1.797999032s Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=sqlstore t=2024-11-19T14:38:46.509948893-05:00 level=info msg="Created default admin" user=admin Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=sqlstore t=2024-11-19T14:38:46.510113108-05:00 level=info msg="Created default organization" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.545637463-05:00 level=info msg="Plugin registered" pluginID=input Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.54864965-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-troubleshooting-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/troubleshooting Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.548695324-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=performancecopilot-pcp-app pluginDir=/usr/share/performancecopilot-pcp-app Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.562487826-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-bpftrace-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/bpftrace Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.562551115-05:00 level=warn msg="Permitting unsigned plugin. 
This is not recommended" pluginID=pcp-redis-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/redis Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.562583654-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-vector-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/vector Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.562614496-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-breadcrumbs-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/breadcrumbs Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.signature.validator t=2024-11-19T14:38:46.562655209-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-flamegraph-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/flamegraph Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.562721588-05:00 level=info msg="Plugin registered" pluginID=pcp-troubleshooting-panel Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.562743445-05:00 level=info msg="Plugin registered" pluginID=performancecopilot-pcp-app Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.562762862-05:00 level=info msg="Plugin registered" pluginID=pcp-bpftrace-datasource Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.562781958-05:00 level=info msg="Plugin registered" pluginID=pcp-redis-datasource Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.572057218-05:00 level=info msg="Plugin registered" pluginID=pcp-vector-datasource Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.572096057-05:00 level=info msg="Plugin registered" pluginID=pcp-breadcrumbs-panel Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=plugin.loader t=2024-11-19T14:38:46.572114527-05:00 level=info msg="Plugin registered" pluginID=pcp-flamegraph-panel Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=secrets t=2024-11-19T14:38:46.572161621-05:00 level=info msg="Envelope encryption state" enabled=true currentprovider=secretKey.v1 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=query_data t=2024-11-19T14:38:46.573877863-05:00 level=info msg="Query Service initialization" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=live.push_http t=2024-11-19T14:38:46.577047996-05:00 level=info msg="Live Push Gateway initialization" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=infra.usagestats.collector t=2024-11-19T14:38:46.68605596-05:00 level=info msg="registering usage stat providers" usageStatsProvidersLen=2 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=server t=2024-11-19T14:38:46.686229184-05:00 level=info msg="Writing PID file" path=/var/run/grafana/grafana-server.pid pid=32245 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.datasources t=2024-11-19T14:38:46.687338464-05:00 level=info msg="inserting datasource from configuration " name="PCP Vector" uid=P9F9E462B02B531A3 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.datasources t=2024-11-19T14:38:46.702826212-05:00 level=info msg="inserting datasource from configuration " 
name="PCP Redis" uid=P691477986F10465E Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.datasources t=2024-11-19T14:38:46.713345026-05:00 level=info msg="inserting datasource from configuration " name="PCP bpftrace" uid=P729B380B5A0EB179 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.alerting t=2024-11-19T14:38:46.72307864-05:00 level=error msg="can't read alerting provisioning files from directory" path=/etc/grafana/provisioning/alerting error="open /etc/grafana/provisioning/alerting: no such file or directory" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.alerting t=2024-11-19T14:38:46.723125111-05:00 level=info msg="starting to provision alerting" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=provisioning.alerting t=2024-11-19T14:38:46.723148666-05:00 level=info msg="finished to provision alerting" Nov 19 14:38:46 managed-node3 systemd[1]: Started Grafana instance. -- Subject: Unit grafana-server.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=grafanaStorageLogger t=2024-11-19T14:38:46.740527895-05:00 level=info msg="storage starting" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=http.server t=2024-11-19T14:38:46.750440861-05:00 level=info msg="HTTP Server Listen" address=[::]:3000 protocol=http subUrl= socket= Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=ngalert t=2024-11-19T14:38:46.750538832-05:00 level=info msg="warming cache for startup" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=ticker t=2024-11-19T14:38:46.754443015-05:00 level=info msg=starting first_tick=2024-11-19T14:38:50-05:00 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=ngalert.multiorg.alertmanager t=2024-11-19T14:38:46.755992154-05:00 level=info msg="starting MultiOrg Alertmanager" Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=sqlstore.transactions t=2024-11-19T14:38:46.774327782-05:00 level=info msg="Database locked, sleeping then retrying" error="database is locked" retry=0 Nov 19 14:38:46 managed-node3 grafana-server[32245]: logger=sqlstore.transactions t=2024-11-19T14:38:46.812214361-05:00 level=info msg="Database locked, sleeping then retrying" error="database is locked" retry=1 Nov 19 14:38:47 managed-node3 platform-python[32458]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/provisioning/plugins/grafana-pcp.yaml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:38:47 managed-node3 platform-python[32622]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045127.0194354-13041-149054040000614/source dest=/etc/grafana/provisioning/plugins/grafana-pcp.yaml owner=root group=grafana mode=0640 _original_basename=grafana-pcp-provisioning.yaml follow=False checksum=9e0223cef1828dcdd1326014cc4532b478a526d1 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:49 managed-node3 platform-python[32825]: ansible-ansible.legacy.uri Invoked with url=http://admin:********@localhost:3000/api/datasources/name/PCP%20Redis force_basic_auth=True headers={'Content-Type': 'application/json', 'Accept': 'application/json'} 
method=GET status_code=[200] force=False http_agent=ansible-httpget use_proxy=True validate_certs=True use_gssapi=False body_format=raw return_content=False follow_redirects=safe timeout=30 remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:50 managed-node3 platform-python[33028]: ansible-ansible.legacy.uri Invoked with url=http://admin:********@localhost:3000/api/datasources/name/PCP%20Vector force_basic_auth=True headers={'Content-Type': 'application/json', 'Accept': 'application/json'} method=GET status_code=[200] force=False http_agent=ansible-httpget use_proxy=True validate_certs=True use_gssapi=False body_format=raw return_content=False follow_redirects=safe timeout=30 remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:50 managed-node3 platform-python[33231]: ansible-ansible.legacy.uri Invoked with url=http://admin:********@localhost:3000/api/datasources/name/PCP%20bpftrace force_basic_auth=True headers={'Content-Type': 'application/json', 'Accept': 'application/json'} method=GET status_code=[200] force=False http_agent=ansible-httpget use_proxy=True validate_certs=True use_gssapi=False body_format=raw return_content=False follow_redirects=safe timeout=30 remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:38:51 managed-node3 platform-python[33434]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:38:52 managed-node3 platform-python[33636]: ansible-ansible.legacy.systemd Invoked with name=redis state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:38:52 managed-node3 systemd[1]: Stopping Redis persistent key-value database... -- Subject: Unit redis.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has begun shutting down. Nov 19 14:38:52 managed-node3 systemd[1]: redis.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit redis.service has successfully entered the 'dead' state. Nov 19 14:38:52 managed-node3 systemd[1]: Stopped Redis persistent key-value database. -- Subject: Unit redis.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has finished shutting down. 
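The three ansible-ansible.legacy.uri invocations above poll the Grafana HTTP API to confirm that the provisioned PCP Redis, PCP Vector and PCP bpftrace datasources are registered. A minimal sketch of a task producing that kind of request is shown below; it is reconstructed from the logged module arguments only, and the variable grafana_admin_password is a hypothetical name, not taken from the role:

- name: Check that a provisioned PCP datasource is registered   # sketch only, not the role's actual task
  ansible.builtin.uri:
    # grafana_admin_password is an assumed vaulted variable holding the admin password
    url: "http://admin:{{ grafana_admin_password }}@localhost:3000/api/datasources/name/PCP%20Redis"
    method: GET
    force_basic_auth: true
    headers:
      Content-Type: application/json
      Accept: application/json
    status_code: [200]
  no_log: true   # keep the admin credentials out of the play output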
Nov 19 14:38:52 managed-node3 systemd[1]: Starting Redis persistent key-value database... -- Subject: Unit redis.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has begun starting up. Nov 19 14:38:52 managed-node3 systemd[1]: Started Redis persistent key-value database. -- Subject: Unit redis.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit redis.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:53 managed-node3 platform-python[33861]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:38:53 managed-node3 systemd[1]: Stopping Proxy for Performance Metrics Collector Daemon... -- Subject: Unit pmproxy.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has begun shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: pmproxy.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmproxy.service has successfully entered the 'dead' state. Nov 19 14:38:53 managed-node3 systemd[1]: Stopped Proxy for Performance Metrics Collector Daemon. -- Subject: Unit pmproxy.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has finished shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: Starting Proxy for Performance Metrics Collector Daemon... -- Subject: Unit pmproxy.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has begun starting up. Nov 19 14:38:53 managed-node3 systemd[1]: Started Proxy for Performance Metrics Collector Daemon. -- Subject: Unit pmproxy.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmproxy.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:53 managed-node3 platform-python[34115]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:38:53 managed-node3 systemd[1]: Stopping pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: pmlogger_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:38:53 managed-node3 systemd[1]: Stopped pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: pmlogger.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:38:53 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished shutting down. Nov 19 14:38:53 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Nov 19 14:38:54 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:54 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. Nov 19 14:38:54 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:56 managed-node3 platform-python[34893]: ansible-ansible.legacy.systemd Invoked with name=grafana-server state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:38:56 managed-node3 systemd[1]: Stopping Grafana instance... -- Subject: Unit grafana-server.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has begun shutting down. Nov 19 14:38:56 managed-node3 grafana-server[32245]: logger=server t=2024-11-19T14:38:56.091468132-05:00 level=info msg="Shutdown started" reason="System signal: terminated" Nov 19 14:38:56 managed-node3 grafana-server[32245]: logger=tracing t=2024-11-19T14:38:56.091969315-05:00 level=info msg="Closing tracing" Nov 19 14:38:56 managed-node3 grafana-server[32245]: logger=ticker t=2024-11-19T14:38:56.093082253-05:00 level=info msg=stopped last_tick=2024-11-19T14:38:50-05:00 Nov 19 14:38:56 managed-node3 systemd[1]: grafana-server.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit grafana-server.service has successfully entered the 'dead' state. Nov 19 14:38:56 managed-node3 systemd[1]: Stopped Grafana instance. -- Subject: Unit grafana-server.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has finished shutting down. Nov 19 14:38:56 managed-node3 systemd[1]: Starting Grafana instance... -- Subject: Unit grafana-server.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has begun starting up. 
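The journal entries above show redis, pmproxy, pmlogger and grafana-server each being restarted through the systemd module in that order. The role issues these as separate invocations (as logged); the loop below is only a compact, hedged illustration of the same sequence:

- name: Restart the metrics stack services in order   # illustrative sketch; the role restarts each service in its own task
  ansible.builtin.systemd:
    name: "{{ item }}"
    state: restarted
    scope: system
  loop:
    - redis            # key-value store backing the PCP Redis datasource
    - pmproxy          # PCP REST API / proxy daemon used by grafana-pcp
    - pmlogger         # PCP archive logger (pmlogger_farm follows it)
    - grafana-server   # Grafana instance with the grafana-pcp plugin
  become: true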
Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433367776-05:00 level=info msg="Starting Grafana" version=9.2.10 commit=NA branch=main compiled=2024-11-19T14:38:56-05:00 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433584382-05:00 level=info msg="Config loaded from" file=/usr/share/grafana/conf/defaults.ini Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433643124-05:00 level=info msg="Config loaded from" file=/etc/grafana/grafana.ini Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433663932-05:00 level=info msg="Config overridden from command line" arg="default.paths.data=/var/lib/grafana" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433685125-05:00 level=info msg="Config overridden from command line" arg="default.paths.logs=/var/log/grafana" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433704513-05:00 level=info msg="Config overridden from command line" arg="default.paths.plugins=/var/lib/grafana/plugins" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433724173-05:00 level=info msg="Config overridden from command line" arg="default.paths.provisioning=/etc/grafana/provisioning" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433743702-05:00 level=info msg="Path Home" path=/usr/share/grafana Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433762752-05:00 level=info msg="Path Data" path=/var/lib/grafana Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433781564-05:00 level=info msg="Path Logs" path=/var/log/grafana Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433800321-05:00 level=info msg="Path Plugins" path=/var/lib/grafana/plugins Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433819146-05:00 level=info msg="Path Provisioning" path=/etc/grafana/provisioning Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=settings t=2024-11-19T14:38:56.433839066-05:00 level=info msg="App mode production" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=sqlstore t=2024-11-19T14:38:56.433915087-05:00 level=info msg="Connecting to DB" dbtype=sqlite3 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=migrator t=2024-11-19T14:38:56.462887433-05:00 level=info msg="Starting DB migrations" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=migrator t=2024-11-19T14:38:56.477057606-05:00 level=info msg="migrations completed" performed=0 skipped=452 duration=2.666553ms Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.540490941-05:00 level=info msg="Plugin registered" pluginID=input Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.545847176-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-vector-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/vector Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.545911409-05:00 level=warn msg="Permitting unsigned plugin. 
This is not recommended" pluginID=pcp-breadcrumbs-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/breadcrumbs Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.545946338-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-flamegraph-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/flamegraph Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.545982239-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-troubleshooting-panel pluginDir=/usr/share/performancecopilot-pcp-app/panels/troubleshooting Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.546010618-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=performancecopilot-pcp-app pluginDir=/usr/share/performancecopilot-pcp-app Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.573002367-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-bpftrace-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/bpftrace Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.signature.validator t=2024-11-19T14:38:56.57305808-05:00 level=warn msg="Permitting unsigned plugin. This is not recommended" pluginID=pcp-redis-datasource pluginDir=/usr/share/performancecopilot-pcp-app/datasources/redis Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573103828-05:00 level=info msg="Plugin registered" pluginID=pcp-vector-datasource Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573119797-05:00 level=info msg="Plugin registered" pluginID=pcp-breadcrumbs-panel Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.57313202-05:00 level=info msg="Plugin registered" pluginID=pcp-flamegraph-panel Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573151897-05:00 level=info msg="Plugin registered" pluginID=pcp-troubleshooting-panel Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573164529-05:00 level=info msg="Plugin registered" pluginID=performancecopilot-pcp-app Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573176678-05:00 level=info msg="Plugin registered" pluginID=pcp-bpftrace-datasource Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugin.loader t=2024-11-19T14:38:56.573188602-05:00 level=info msg="Plugin registered" pluginID=pcp-redis-datasource Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=secrets t=2024-11-19T14:38:56.601953035-05:00 level=info msg="Envelope encryption state" enabled=true currentprovider=secretKey.v1 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=query_data t=2024-11-19T14:38:56.60494027-05:00 level=info msg="Query Service initialization" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=live.push_http t=2024-11-19T14:38:56.616157616-05:00 level=info msg="Live Push Gateway initialization" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=infra.usagestats.collector t=2024-11-19T14:38:56.784359646-05:00 level=info msg="registering usage stat providers" usageStatsProvidersLen=2 Nov 19 14:38:56 
managed-node3 grafana-server[34903]: logger=server t=2024-11-19T14:38:56.78450809-05:00 level=info msg="Writing PID file" path=/var/run/grafana/grafana-server.pid pid=34903 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=provisioning.plugins t=2024-11-19T14:38:56.836167697-05:00 level=info msg="Updating app from configuration " type=performancecopilot-pcp-app enabled=true Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.840181788-05:00 level=info msg="Plugin state changed" pluginId=performancecopilot-pcp-app enabled=true Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.840233088-05:00 level=info msg="Syncing plugin dashboards to DB" pluginId=performancecopilot-pcp-app Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.842569898-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Redis: Metric Preview (Graph)" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.854287952-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Redis: Metric Preview (Table)" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.86484189-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.876727039-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: CPU" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.887572356-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: System CPU" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.897942118-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: User CPU" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.908674776-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Storage" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.928832363-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Memory" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.940879377-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Swap Memory" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.951969373-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Network" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.963291122-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Network RX" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=plugindashboards t=2024-11-19T14:38:56.974588959-05:00 level=info msg="Auto updating App dashboard" dashboard="PCP Vector Checklist: Network TX" newRev=3 oldRev=0 Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=provisioning.alerting 
t=2024-11-19T14:38:56.989932899-05:00 level=error msg="can't read alerting provisioning files from directory" path=/etc/grafana/provisioning/alerting error="open /etc/grafana/provisioning/alerting: no such file or directory" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=provisioning.alerting t=2024-11-19T14:38:56.989970365-05:00 level=info msg="starting to provision alerting" Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=provisioning.alerting t=2024-11-19T14:38:56.989994699-05:00 level=info msg="finished to provision alerting" Nov 19 14:38:56 managed-node3 systemd[1]: Started Grafana instance. -- Subject: Unit grafana-server.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit grafana-server.service has finished starting up. -- -- The start-up result is done. Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=http.server t=2024-11-19T14:38:56.993293249-05:00 level=info msg="HTTP Server Listen" address=[::]:3000 protocol=http subUrl= socket= Nov 19 14:38:56 managed-node3 grafana-server[34903]: logger=ngalert t=2024-11-19T14:38:56.993412162-05:00 level=info msg="warming cache for startup" Nov 19 14:38:57 managed-node3 grafana-server[34903]: logger=grafanaStorageLogger t=2024-11-19T14:38:57.004099919-05:00 level=info msg="storage starting" Nov 19 14:38:57 managed-node3 grafana-server[34903]: logger=ticker t=2024-11-19T14:38:57.013499876-05:00 level=info msg=starting first_tick=2024-11-19T14:39:00-05:00 Nov 19 14:38:57 managed-node3 grafana-server[34903]: logger=ngalert.multiorg.alertmanager t=2024-11-19T14:38:57.013553916-05:00 level=info msg="starting MultiOrg Alertmanager" Nov 19 14:38:57 managed-node3 platform-python[35115]: ansible-service_facts Invoked Nov 19 14:39:01 managed-node3 platform-python[35470]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 19 14:39:02 managed-node3 platform-python[35700]: ansible-service_facts Invoked Nov 19 14:39:04 managed-node3 platform-python[35990]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:39:05 managed-node3 platform-python[36191]: ansible-ansible.legacy.dnf Invoked with name=['redis'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:39:08 managed-node3 platform-python[36393]: ansible-file Invoked with path=/etc/redis state=directory owner=redis group=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:08 managed-node3 platform-python[36594]: ansible-ansible.legacy.stat Invoked with path=/etc/redis/redis.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:09 managed-node3 platform-python[36697]: 
ansible-ansible.legacy.file Invoked with mode=0640 owner=redis group=root dest=/etc/redis/redis.conf _original_basename=CentOS_8_keyserver.conf.j2 recurse=False state=file path=/etc/redis/redis.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:09 managed-node3 platform-python[36898]: ansible-file Invoked with src=/etc/redis/redis.conf dest=/etc/redis.conf state=link force=True path=/etc/redis.conf recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:10 managed-node3 platform-python[37099]: ansible-ansible.legacy.systemd Invoked with name=redis state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:11 managed-node3 platform-python[37304]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:39:14 managed-node3 platform-python[37506]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:39:17 managed-node3 platform-python[37708]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:39:18 managed-node3 platform-python[37910]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:18 managed-node3 platform-python[38111]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None Nov 19 14:39:19 managed-node3 platform-python[38312]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:19 managed-node3 platform-python[38415]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:20 managed-node3 platform-python[38616]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:20 managed-node3 platform-python[38719]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:21 managed-node3 platform-python[38920]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:21 managed-node3 platform-python[39023]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:22 managed-node3 platform-python[39224]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 19 14:39:22 managed-node3 platform-python[39429]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:39:23 managed-node3 platform-python[39635]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:23 managed-node3 platform-python[39738]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:24 managed-node3 platform-python[39939]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:25 managed-node3 platform-python[40144]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:25 managed-node3 platform-python[40345]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:26 managed-node3 platform-python[40546]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:26 managed-node3 platform-python[40747]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:27 managed-node3 platform-python[40948]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None 
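The shell snippet invoked at 14:39:22 (the sasldblistusers2/saslpasswd2 block) is flattened into a single journal line above. Rendered readably as an Ansible shell task it looks roughly like the sketch below; the literal "metrics" user name and password are simply what this test run logged, and the real role presumably templates them from variables:

- name: Ensure the PCP SASL "metrics" user exists   # readable rendering of the logged shell, not the role source
  ansible.builtin.shell: |
    set -eu
    if set -o | grep -q pipefail; then
        set -o pipefail  # pipefail not supported on debian, some ubuntu
    fi
    if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
        echo "Creating new metrics user in /etc/pcp/passwd.db"
        echo "metrics" | saslpasswd2 -a pmcd "metrics"
        chown root:pcp "/etc/pcp/passwd.db"
        chmod 640 "/etc/pcp/passwd.db"
    fi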
Nov 19 14:39:27 managed-node3 platform-python[41149]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:28 managed-node3 platform-python[41350]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:28 managed-node3 platform-python[41551]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:29 managed-node3 platform-python[41752]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:29 managed-node3 platform-python[41855]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:30 managed-node3 platform-python[42056]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:30 managed-node3 platform-python[42159]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:30 managed-node3 platform-python[42360]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:31 managed-node3 platform-python[42463]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None 
modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:31 managed-node3 platform-python[42664]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:32 managed-node3 platform-python[42767]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:32 managed-node3 platform-python[42968]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:33 managed-node3 platform-python[43071]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:33 managed-node3 platform-python[43272]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:33 managed-node3 platform-python[43375]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:34 managed-node3 platform-python[43576]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:35 managed-node3 platform-python[43777]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:35 managed-node3 platform-python[43978]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True 
path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:36 managed-node3 platform-python[44179]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:36 managed-node3 platform-python[44380]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:37 managed-node3 platform-python[44581]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:37 managed-node3 platform-python[44782]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:38 managed-node3 platform-python[44983]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:38 managed-node3 systemd[1]: Stopping pmie farm service... -- Subject: Unit pmie_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun shutting down. Nov 19 14:39:38 managed-node3 systemd[1]: pmie_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_farm.service has successfully entered the 'dead' state. Nov 19 14:39:38 managed-node3 systemd[1]: Stopped pmie farm service. -- Subject: Unit pmie_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished shutting down. 
Nov 19 14:39:38 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun shutting down. Nov 19 14:39:38 managed-node3 systemd[1]: pmie.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie.service has successfully entered the 'dead' state. Nov 19 14:39:38 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished shutting down. Nov 19 14:39:38 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun starting up. Nov 19 14:39:39 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished starting up. -- -- The start-up result is done. Nov 19 14:39:39 managed-node3 systemd[1]: Starting pmie farm service... -- Subject: Unit pmie_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun starting up. Nov 19 14:39:39 managed-node3 systemd[1]: Started pmie farm service. -- Subject: Unit pmie_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished starting up. -- -- The start-up result is done. 
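With pmie and pmie_farm restarted above, a minimal set of manual spot-checks equivalent to the verification tasks that run later in this log would look like the sketch below. The pmprobe pipeline appears verbatim further down in this transcript; the systemctl check is an assumed convenience for hand-testing, not something this run executes.

  # Hand-run equivalents of the later verification tasks (illustrative only)
  systemctl is-active pmie pmie_farm                   # both units were just (re)started above
  pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"'    # primary pmie instance registered with pmcd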
Nov 19 14:39:39 managed-node3 platform-python[45762]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:40 managed-node3 platform-python[45963]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:40 managed-node3 platform-python[46066]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:41 managed-node3 platform-python[46267]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:41 managed-node3 platform-python[46370]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:42 managed-node3 platform-python[46571]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:42 managed-node3 platform-python[46776]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmproxy follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:43 managed-node3 platform-python[46879]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmproxy _original_basename=pmproxy.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmproxy force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:43 managed-node3 platform-python[47080]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:44 managed-node3 platform-python[47285]: ansible-ansible.legacy.dnf Invoked with name=['grafana'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False 
nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:39:48 managed-node3 platform-python[47487]: ansible-package_facts Invoked with manager=['auto'] strategy=first Nov 19 14:39:50 managed-node3 platform-python[47690]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/grafana.ini follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:50 managed-node3 platform-python[47793]: ansible-ansible.legacy.file Invoked with mode=0640 dest=/etc/grafana/grafana.ini _original_basename=grafana_9.ini.j2 recurse=False state=file path=/etc/grafana/grafana.ini force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:51 managed-node3 platform-python[47994]: ansible-file Invoked with path=/etc/grafana/provisioning/datasources state=directory group=grafana owner=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:51 managed-node3 platform-python[48195]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/provisioning/datasources/grafana-pcp.yaml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:52 managed-node3 platform-python[48298]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/grafana/provisioning/datasources/grafana-pcp.yaml _original_basename=grafana-pcp-datasources.yaml.j2 recurse=False state=file path=/etc/grafana/provisioning/datasources/grafana-pcp.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:52 managed-node3 platform-python[48499]: ansible-ansible.legacy.systemd Invoked with name=grafana-server state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:39:53 managed-node3 platform-python[48705]: ansible-ansible.legacy.stat Invoked with path=/etc/grafana/provisioning/plugins/grafana-pcp.yaml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:39:53 managed-node3 platform-python[48808]: ansible-ansible.legacy.file Invoked with owner=root group=grafana mode=0640 dest=/etc/grafana/provisioning/plugins/grafana-pcp.yaml _original_basename=grafana-pcp-provisioning.yaml recurse=False state=file path=/etc/grafana/provisioning/plugins/grafana-pcp.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:39:55 managed-node3 platform-python[49011]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:39:55 managed-node3 platform-python[49212]: ansible-stat Invoked with path=/sbin/transactional-update 
follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:39:56 managed-node3 platform-python[49413]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:40:00 managed-node3 platform-python[49615]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 19 14:40:01 managed-node3 platform-python[49820]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:40:01 managed-node3 systemd[1]: Reloading. Nov 19 14:40:01 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:28: Unknown lvalue 'ProtectClock' in section 'Service' Nov 19 14:40:01 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:31: Unknown lvalue 'ProtectHostname' in section 'Service' Nov 19 14:40:01 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:32: Unknown lvalue 'ProtectKernelLogs' in section 'Service' Nov 19 14:40:01 managed-node3 systemd[1]: /usr/lib/systemd/system/grafana-server.service:35: Unknown lvalue 'ProtectProc' in section 'Service' Nov 19 14:40:01 managed-node3 systemd[1]: Starting firewalld - dynamic firewall daemon... -- Subject: Unit firewalld.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has begun starting up. Nov 19 14:40:01 managed-node3 systemd[1]: Started firewalld - dynamic firewall daemon. -- Subject: Unit firewalld.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has finished starting up. -- -- The start-up result is done. Nov 19 14:40:02 managed-node3 firewalld[49851]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now. 
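The AllowZoneDrifting warning above is emitted by firewalld itself and is unrelated to the test flow; one possible way to silence it on a host like this (an illustrative remediation, not something this run performs) is to turn the option off in firewalld's main configuration and restart the service:

  # Illustrative fix for the AllowZoneDrifting warning (not part of this test run)
  sed -i 's/^AllowZoneDrifting=.*/AllowZoneDrifting=no/' /etc/firewalld/firewalld.conf
  systemctl restart firewalld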
Nov 19 14:40:03 managed-node3 platform-python[50117]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['44321/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 19 14:40:04 managed-node3 platform-python[50318]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['44322/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 19 14:40:04 managed-node3 platform-python[50519]: ansible-fedora.linux_system_roles.firewall_lib Invoked with service=['grafana'] permanent=True runtime=True state=enabled __report_changed=True port=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 19 14:40:05 managed-node3 platform-python[50720]: ansible-fedora.linux_system_roles.firewall_lib Invoked with service=['redis'] permanent=True runtime=True state=enabled __report_changed=True port=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 19 14:40:07 managed-node3 platform-python[50921]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:40:08 managed-node3 platform-python[51122]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:40:08 managed-node3 platform-python[51323]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:40:12 managed-node3 platform-python[51525]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] 
installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 19 14:40:15 managed-node3 platform-python[51727]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 19 14:40:16 managed-node3 platform-python[51957]: ansible-seboolean Invoked with name=pcp_bind_all_unreserved_ports state=True persistent=False ignore_selinux_state=False Nov 19 14:40:16 managed-node3 dbus-daemon[615]: [system] Reloaded configuration Nov 19 14:40:17 managed-node3 platform-python[52158]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 19 14:40:21 managed-node3 platform-python[52359]: ansible-ansible.legacy.command Invoked with _raw_params=pcp _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:22 managed-node3 platform-python[52651]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:22 managed-node3 platform-python[52855]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:23 managed-node3 platform-python[53057]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger_timers" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:23 managed-node3 platform-python[53259]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:24 managed-node3 platform-python[53463]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail redis-cli PING | grep PONG _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:25 managed-node3 platform-python[53667]: ansible-ansible.legacy.uri Invoked with url=http://localhost:44322/series/ping method=GET status_code=[200] force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw return_content=False follow_redirects=safe timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:40:25 
managed-node3 platform-python[53870]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmproxy" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:26 managed-node3 platform-python[54072]: ansible-ansible.legacy.uri Invoked with url=http://localhost:3000/login method=GET status_code=[200] force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw return_content=False follow_redirects=safe timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:40:27 managed-node3 platform-python[54275]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail (cd /tmp && /usr/sbin/grafana-cli plugins ls) | grep performancecopilot _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:27 managed-node3 runuser[54280]: pam_unix(runuser:session): session opened for user grafana by root(uid=0) Nov 19 14:40:27 managed-node3 runuser[54280]: pam_unix(runuser:session): session closed for user grafana Nov 19 14:40:27 managed-node3 platform-python[54488]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/grafana/grafana.ini" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:28 managed-node3 platform-python[54690]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-services _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:29 managed-node3 platform-python[54892]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-services _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:30 managed-node3 platform-python[55094]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:30 managed-node3 platform-python[55296]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:31 managed-node3 platform-python[55498]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:40:32 managed-node3 platform-python[55700]: ansible-ansible.legacy.command Invoked with 
_raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 17 root root 224 May 29 03:47 ../ drwxr-xr-x. 2 root root 60 Nov 19 14:30 tmpfiles.d/ drwxr-xr-x. 3 root root 60 Nov 19 14:30 log/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 mount/ drwxr-xr-x. 4 root root 100 Nov 19 14:30 initramfs/ -r--r--r--. 1 root root 33 Nov 19 14:30 machine-id prw-------. 1 root root 0 Nov 19 14:30 initctl| srw-rw-rw-. 1 root root 0 Nov 19 14:30 rpcbind.sock= drwx------. 2 root root 40 Nov 19 14:30 cryptsetup/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 setrans/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 sepermit/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 motd.d/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 faillock/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 console/ drwx--x--x. 3 root root 60 Nov 19 14:30 sudo/ drwx------. 2 rpc rpc 60 Nov 19 14:30 rpcbind/ -rw-r--r--. 1 root root 4 Nov 19 14:30 auditd.pid srw-rw-rw-. 1 root root 0 Nov 19 14:30 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 Nov 19 14:30 dbus/ drwxr-xr-x. 2 root root 60 Nov 19 14:30 irqbalance/ -rw-r--r--. 1 root root 694 Nov 19 14:30 dhclient.lease -rw-r--r--. 1 root root 4 Nov 19 14:30 dhclient.pid -rw-------. 1 root root 4 Nov 19 14:30 gssproxy.pid srw-rw-rw-. 1 root root 0 Nov 19 14:30 gssproxy.sock= drwxr-xr-x. 2 root root 100 Nov 19 14:30 chrony-helper/ drwxr-x---. 2 chrony chrony 80 Nov 19 14:30 chrony/ drwxr-xr-x. 2 root root 60 Nov 19 14:30 tuned/ -rw-------. 1 root root 5 Nov 19 14:30 sm-notify.pid drwxr-xr-x. 3 root root 80 Nov 19 14:30 lock/ -rw-r--r--. 1 root root 5 Nov 19 14:30 sshd.pid -rw-------. 1 root root 4 Nov 19 14:30 rsyslogd.pid -rw-r--r--. 1 root root 5 Nov 19 14:30 crond.pid ----------. 1 root root 0 Nov 19 14:30 cron.reboot drwxr-xr-x. 2 root root 40 Nov 19 14:30 plymouth/ drwxr-xr-x. 2 root root 80 Nov 19 14:30 blkid/ drwx------. 3 root root 340 Nov 19 14:30 cloud-init/ -rw-------. 1 root root 0 Nov 19 14:30 agetty.reload drwxr-xr-x. 3 root root 60 Nov 19 14:33 user/ drwxr-xr-x. 6 root root 160 Nov 19 14:35 NetworkManager/ drwxr-xr-x. 2 redis redis 40 Nov 19 14:38 redis/ drwxr-x---. 2 grafana grafana 60 Nov 19 14:38 grafana/ drwxrwxr-x. 2 pcp pcp 200 Nov 19 14:39 pcp/ drwxr-xr-x. 18 root root 460 Nov 19 14:40 systemd/ drwxr-xr-x. 31 root root 940 Nov 19 14:40 ./ drwxr-x---. 2 root root 40 Nov 19 14:40 firewalld/ drwxr-xr-x. 7 root root 160 Nov 19 14:40 udev/ -rw-rw-r--. 1 root utmp 1536 Nov 19 14:40 utmp total 16 srw-rw-rw-. 1 root root 0 Nov 19 14:37 pmcd.socket= -r--r--r--. 1 root root 5 Nov 19 14:37 pmcd.pid srwxrwxrwx. 1 root root 0 Nov 19 14:38 pmproxy.socket= -r--r--r--. 1 root root 5 Nov 19 14:38 pmproxy.pid lrwxrwxrwx. 1 pcp pcp 30 Nov 19 14:38 pmlogger.primary.socket -> /run/pcp/pmlogger.34408.socket= -r--r--r--. 1 pcp pcp 5 Nov 19 14:38 pmlogger.pid srw-rw-rw-. 1 pcp pcp 0 Nov 19 14:38 pmlogger.34408.socket= -r--r--r--. 1 pcp pcp 5 Nov 19 14:39 pmie.pid drwxrwxr-x. 2 pcp pcp 200 Nov 19 14:39 ./ drwxr-xr-x. 
31 root root 940 Nov 19 14:40 ../ TASK [Reraise error] *********************************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 Tuesday 19 November 2024 14:40:32 -0500 (0:00:00.630) 0:01:32.913 ****** fatal: [managed-node3]: FAILED! => { "changed": false } MSG: {'changed': False, 'stdout': '44321/tcp 44322/tcp', 'stderr': '', 'rc': 0, 'cmd': ['firewall-cmd', '--list-ports'], 'start': '2024-11-19 14:40:31.854360', 'end': '2024-11-19 14:40:32.118368', 'delta': '0:00:00.264008', 'msg': '', 'invocation': {'module_args': {'_raw_params': 'firewall-cmd --list-ports', '_uses_shell': False, 'expand_argument_vars': True, 'stdin_add_newline': True, 'strip_empty_ends': True, 'argv': None, 'chdir': None, 'executable': None, 'creates': None, 'removes': None, 'stdin': None}}, 'stdout_lines': ['44321/tcp 44322/tcp'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': True, 'failed_when_result': True} TASK [Get final state of services] ********************************************* task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Tuesday 19 November 2024 14:40:33 -0500 (0:00:00.068) 0:01:32.982 ****** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", 
"source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grafana-server.service": { "name": "grafana-server.service", "source": "systemd", "state": "running", "status": "enabled" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mariadb.service": { "name": "mariadb.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "mysqld.service": { "name": "mysqld.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { 
"name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily_report.service": { "name": "pmlogger_daily_report.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "running", "status": "enabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postgresql.service": { "name": "postgresql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis-sentinel.service": { "name": "redis-sentinel.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "running", "status": "enabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": 
"systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, 
"user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Tuesday 19 November 2024 14:40:34 -0500 (0:00:01.852) 0:01:34.834 ****** ok: [managed-node3] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "item": "pmcd", "name": "pmcd", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:37:39 EST", "ActiveEnterTimestampMonotonic": "452428617", "ActiveExitTimestamp": "Tue 2024-11-19 14:37:38 EST", "ActiveExitTimestampMonotonic": "451383313", "ActiveState": "active", "After": "avahi-daemon.service systemd-journald.socket basic.target system.slice sysinit.target network-online.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:37:39 EST", "AssertTimestampMonotonic": "452118064", "Before": "shutdown.target pmproxy.service pmie.service multi-user.target pmlogger.service zabbix-agent.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:37:39 EST", "ConditionTimestampMonotonic": "452118063", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmcd(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": 
"", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "21673", "ExecMainStartTimestamp": "Tue 2024-11-19 14:37:39 EST", "ExecMainStartTimestampMonotonic": "452428598", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:37:39 EST", "InactiveEnterTimestampMonotonic": "452116883", "InactiveExitTimestamp": "Tue 2024-11-19 14:37:39 EST", "InactiveExitTimestampMonotonic": "452119123", "InvocationID": "ca073c479f2a4f3297bd172d338c087c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "21673", "MemoryAccounting": "yes", "MemoryCurrent": "167968768", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmcd.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": 
"no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:37:39 EST", "StateChangeTimestampMonotonic": "452428617", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "22", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmie.service pmlogger.service multi-user.target", "WatchdogTimestamp": "Tue 2024-11-19 14:37:39 EST", "WatchdogTimestampMonotonic": "452428613", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "item": "pmlogger", "name": "pmlogger", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:54 EST", "ActiveEnterTimestampMonotonic": "527917875", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "ActiveExitTimestampMonotonic": "526836619", "ActiveState": "active", "After": "sysinit.target pmcd.service systemd-journald.socket basic.target system.slice network-online.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:53 EST", "AssertTimestampMonotonic": "526915609", "Before": "pmlogger_farm.service pmlogger_daily.timer shutdown.target pmlogger_check.timer multi-user.target", "BindsTo": "pmlogger_farm.service pmlogger_daily.timer pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override 
cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:53 EST", "ConditionTimestampMonotonic": "526915607", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "man:pmlogger(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "34408", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:54 EST", "ExecMainStartTimestampMonotonic": "527917851", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveEnterTimestampMonotonic": "526914183", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveExitTimestampMonotonic": "526916788", "InvocationID": "7c1fc5e932644c43bde7be0e813ae809", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "34408", "MemoryAccounting": "yes", "MemoryCurrent": "3796992", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", 
"MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmlogger.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:54 EST", "StateChangeTimestampMonotonic": "527917875", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2024-11-19 14:38:54 EST", "WatchdogTimestampMonotonic": "527917872", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "item": "pmie", "name": "pmie", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:39:39 EST", "ActiveEnterTimestampMonotonic": "572126902", "ActiveExitTimestamp": "Tue 2024-11-19 14:39:38 EST", "ActiveExitTimestampMonotonic": "571664500", "ActiveState": "active", "After": "pmcd.service basic.target system.slice network-online.target systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:39:38 EST", "AssertTimestampMonotonic": "571900750", "Before": "multi-user.target shutdown.target pmie_daily.timer pmie_farm.service pmie_check.timer", "BindsTo": "pmie_daily.timer pmie_farm.service pmie_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": 
"infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:39:38 EST", "ConditionTimestampMonotonic": "571900749", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "man:pmie(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "45423", "ExecMainStartTimestamp": "Tue 2024-11-19 14:39:39 EST", "ExecMainStartTimestampMonotonic": "572126880", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:39:38 EST", "InactiveEnterTimestampMonotonic": "571899587", "InactiveExitTimestamp": "Tue 2024-11-19 14:39:38 EST", "InactiveExitTimestampMonotonic": "571901918", "InvocationID": "61f52f7170ed43208e8a26b3c30b89f3", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", 
"LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "45423", "MemoryAccounting": "yes", "MemoryCurrent": "1597440", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmie.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:39:39 EST", "StateChangeTimestampMonotonic": "572126902", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2024-11-19 14:39:39 EST", "WatchdogTimestampMonotonic": "572126899", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "item": "pmproxy", "name": "pmproxy", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "ActiveEnterTimestampMonotonic": "526124272", "ActiveExitTimestamp": "Tue 
2024-11-19 14:38:53 EST", "ActiveExitTimestampMonotonic": "526046694", "ActiveState": "active", "After": "pmcd.service system.slice sysinit.target basic.target network-online.target avahi-daemon.service systemd-journald.socket redis.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:53 EST", "AssertTimestampMonotonic": "526055244", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:53 EST", "ConditionTimestampMonotonic": "526055242", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmproxy.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmproxy(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "33869", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:53 EST", "ExecMainStartTimestampMonotonic": "526056381", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmproxy ; argv[]=/usr/libexec/pcp/lib/pmproxy start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveEnterTimestampMonotonic": "526054276", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:53 EST", "InactiveExitTimestampMonotonic": "526056432", "InvocationID": "a3ebdd486bde4426b071790a238545a2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", 
"LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33869", "MemoryAccounting": "yes", "MemoryCurrent": "44310528", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:53 EST", "StateChangeTimestampMonotonic": "526124272", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:53 EST", "WatchdogTimestampMonotonic": 
"526124269", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "item": "redis", "name": "redis", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:52 EST", "ActiveEnterTimestampMonotonic": "525349390", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:52 EST", "ActiveExitTimestampMonotonic": "525309967", "ActiveState": "active", "After": "system.slice systemd-journald.socket basic.target network.target sysinit.target -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:52 EST", "AssertTimestampMonotonic": "525338420", "Before": "multi-user.target pmproxy.service shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:52 EST", "ConditionTimestampMonotonic": "525338419", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/redis.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Redis persistent key-value database", "DevicePolicy": "auto", "DropInPaths": "/etc/systemd/system/redis.service.d/limit.conf", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "33658", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:52 EST", "ExecMainStartTimestampMonotonic": "525339503", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/redis-server ; argv[]=/usr/bin/redis-server /etc/redis.conf --supervised systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/redis-shutdown ; argv[]=/usr/libexec/redis-shutdown ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/redis.service", "FreezerState": "running", "GID": "990", "Group": "redis", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", 
"Id": "redis.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:52 EST", "InactiveEnterTimestampMonotonic": "525337426", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:52 EST", "InactiveExitTimestampMonotonic": "525339544", "InvocationID": "2c8bc08d3272449486f3a6f6b38f9b49", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "10240", "LimitNOFILESoft": "10240", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33658", "MemoryAccounting": "yes", "MemoryCurrent": "12500992", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "redis.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/redis", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectory": "redis", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:52 EST", "StateChangeTimestampMonotonic": "525349390", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", 
"SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "redis", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:52 EST", "WatchdogTimestampMonotonic": "525349388", "WatchdogUSec": "0" } } skipping: [managed-node3] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "item": "grafana-server", "name": "grafana-server", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:38:56 EST", "ActiveEnterTimestampMonotonic": "530002109", "ActiveExitTimestamp": "Tue 2024-11-19 14:38:56 EST", "ActiveExitTimestampMonotonic": "529102114", "ActiveState": "active", "After": "postgresql.service basic.target mariadb.service systemd-tmpfiles-setup.service network-online.target sysinit.target mysqld.service tmp.mount -.mount systemd-journald.socket system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:38:56 EST", "AssertTimestampMonotonic": "529148785", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:38:56 EST", "ConditionTimestampMonotonic": "529148784", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/grafana-server.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Grafana instance", "DevicePolicy": "closed", "Documentation": "http://docs.grafana.org", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/grafana-server (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "34903", "ExecMainStartTimestamp": "Tue 2024-11-19 14:38:56 EST", "ExecMainStartTimestampMonotonic": "529150395", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/grafana-server ; argv[]=/usr/sbin/grafana-server --config=${CONF_FILE} --pidfile=${PID_FILE_DIR}/grafana-server.pid --packaging=rpm cfg:default.paths.logs=${LOG_DIR} cfg:default.paths.data=${DATA_DIR} cfg:default.paths.plugins=${PLUGINS_DIR} cfg:default.paths.provisioning=${PROVISIONING_CFG_DIR} ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", 
"FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/grafana-server.service", "FreezerState": "running", "GID": "988", "Group": "grafana", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "grafana-server.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:38:56 EST", "InactiveEnterTimestampMonotonic": "529147294", "InactiveExitTimestamp": "Tue 2024-11-19 14:38:56 EST", "InactiveExitTimestampMonotonic": "529150440", "InvocationID": "e1343c787b8d4eac8e04d430f667f8e0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "10000", "LimitNOFILESoft": "10000", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "34903", "MemoryAccounting": "yes", "MemoryCurrent": "63287296", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "grafana-server.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "yes", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "yes", "PrivateUsers": "no", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "yes", "ProtectSystem": "full", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "yes", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/grafana /usr/share/grafana /var/tmp", "Restart": "on-failure", "RestartUSec": "100ms", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectory": "grafana", "RuntimeDirectoryMode": "0750", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", 
"StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:38:56 EST", "StateChangeTimestampMonotonic": "530002109", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "13", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "20s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "988", "UMask": "0027", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "grafana", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogTimestamp": "Tue 2024-11-19 14:38:56 EST", "WatchdogTimestampMonotonic": "530002105", "WatchdogUSec": "0", "WorkingDirectory": "/usr/share/grafana" } } TASK [Stop firewall] *********************************************************** task path: /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29 Tuesday 19 November 2024 14:40:38 -0500 (0:00:04.050) 0:01:38.885 ****** changed: [managed-node3] => { "changed": true, "name": "firewalld", "state": "stopped", "status": { "ActiveEnterTimestamp": "Tue 2024-11-19 14:40:01 EST", "ActiveEnterTimestampMonotonic": "594967987", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target polkit.service system.slice dbus.service dbus.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:40:01 EST", "AssertTimestampMonotonic": "594664287", "Before": "network-pre.target multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:40:01 EST", "ConditionTimestampMonotonic": "594664285", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service iptables.service 
nftables.service shutdown.target ebtables.service ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "49851", "ExecMainStartTimestamp": "Tue 2024-11-19 14:40:01 EST", "ExecMainStartTimestampMonotonic": "594665951", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:40:01 EST] ; stop_time=[n/a] ; pid=49851 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2024-11-19 14:40:01 EST", "InactiveExitTimestampMonotonic": "594665999", "InvocationID": "c15fa1a159a443aca38792967b64aab2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "49851", "MemoryAccounting": "yes", "MemoryCurrent": "40488960", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "dbus-org.fedoraproject.FirewallD1.service firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", 
"OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:40:01 EST", "StateChangeTimestampMonotonic": "594967987", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Tue 2024-11-19 14:40:01 EST", "WatchdogTimestampMonotonic": "594967984", "WatchdogUSec": "0" } } PLAY RECAP ********************************************************************* managed-node3 : ok=117 changed=6 unreachable=0 failed=1 skipped=65 rescued=1 ignored=0 Tuesday 19 November 2024 14:40:39 -0500 (0:00:01.037) 0:01:39.923 ****** =============================================================================== fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts --- 5.18s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 Restore state of services ----------------------------------------------- 4.05s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.66s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 fedora.linux_system_roles.firewall : Configure firewall ----------------- 3.41s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts --- 3.20s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 fedora.linux_system_roles.private_metrics_subrole_keyserver : Install key 
server packages --- 3.19s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:31 fedora.linux_system_roles.firewall : Install firewalld ------------------ 3.17s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 3.07s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 3.06s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 fedora.linux_system_roles.private_metrics_subrole_grafana : Install Grafana packages --- 3.06s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:27 fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages --- 3.03s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33 fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages --- 3.01s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27 fedora.linux_system_roles.private_metrics_subrole_grafana : Get package facts now that Grafana is installed --- 2.11s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_grafana/tasks/main.yml:33 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist --- 2.09s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist --- 2.02s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 Get initial state of services ------------------------------------------- 1.93s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Get final state of services --------------------------------------------- 1.85s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Gathering Facts --------------------------------------------------------- 1.36s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_fullstack.yml:9 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.26s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot --- 1.22s /tmp/collections-h0K/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127
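Note: the task bodies themselves are not captured in this output, only their results. As a rough illustration of the "Restore state of services" task recorded at restore_services_state.yml:9, the following is a minimal sketch inferred from the loop items, the registered variable name final_state, and the skip conditional shown above (item + '.service' in final_state.ansible_facts.services). The module choice (ansible.builtin.service), the literal service list, and the fixed target state are assumptions, not the actual test source; the real task presumably derives the desired state from the recorded initial snapshot rather than hard-coding "started".

# Sketch only: not taken from the collection source; module, service list, and state are assumed.
- name: Restore state of services
  ansible.builtin.service:
    name: "{{ item }}"
    state: started                  # the log above reports state "started" for every restored item
  loop:
    - pmcd
    - pmlogger
    - pmie
    - pmproxy
    - redis
    - valkey
    - grafana-server
  when: item + '.service' in final_state.ansible_facts.services   # matches the valkey skip reason in the log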