ansible-playbook [core 2.17.5]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-9Uu
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep 9 2024, 00:00:00) [GCC 14.2.1 20240801 (Red Hat 14.2.1-1)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_share_system_dir.yml *******************************************
1 plays in /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml

PLAY [Ensure that the role can share tangd.socket.d directory with other files] ***

TASK [Create the tangd.socket.d directory] *************************************
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:14
Saturday 02 November 2024 08:48:54 -0400 (0:00:00.013) 0:00:00.013 *****
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
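For orientation, a minimal sketch of the two preparation tasks this play runs (tests_share_system_dir.yml:14 and :20), reconstructed only from the file and copy results recorded below. The real tasks live in the test playbook; the contents of override2.conf are not visible in this log, so the [Socket] body shown here is an assumption.

    # Sketch reconstructed from the results in this log; not the verbatim test tasks.
    - name: Create the tangd.socket.d directory
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: directory
        owner: root
        group: root
        mode: "0775"

    - name: Create a customization systemd file
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override2.conf
        # Placeholder body; the actual 28-byte test content is not shown in this log.
        content: |
          [Socket]
          MaxConnections=128
        owner: root
        group: root
        mode: "0664"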
changed: [managed-node3] => {
    "ansible_facts": {
        "discovered_interpreter_python": "/usr/bin/python3.12"
    },
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0775",
    "owner": "root",
    "path": "/etc/systemd/system/tangd.socket.d",
    "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [Create a customization systemd file] *************************************
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:20
Saturday 02 November 2024 08:48:54 -0400 (0:00:00.708) 0:00:00.721 *****
changed: [managed-node3] => {
    "changed": true,
    "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2",
    "dest": "/etc/systemd/system/tangd.socket.d/override2.conf",
    "gid": 0,
    "group": "root",
    "md5sum": "fb9de2e8557683271457053efbe78252",
    "mode": "0664",
    "owner": "root",
    "secontext": "system_u:object_r:tangd_unit_file_t:s0",
    "size": 28,
    "src": "/root/.ansible/tmp/ansible-tmp-1730551734.7940044-10022-93758658161409/.source.conf",
    "state": "file",
    "uid": 0
}

TASK [Run role] ****************************************************************
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:28
Saturday 02 November 2024 08:48:55 -0400 (0:00:00.888) 0:00:01.610 *****
included: fedora.linux_system_roles.nbde_server for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] ***
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6
Saturday 02 November 2024 08:48:55 -0400 (0:00:00.051) 0:00:01.662 *****
included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] ***
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
Saturday 02 November 2024 08:48:55 -0400 (0:00:00.026) 0:00:01.689 *****
ok: [managed-node3]

TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] *******
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10
Saturday 02 November 2024 08:48:56 -0400 (0:00:00.873) 0:00:02.562 *****
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] ***
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15
Saturday 02 November 2024 08:48:56 -0400 (0:00:00.339) 0:00:02.902 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__nbde_server_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] ***
task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19
Saturday 02 November 2024 08:48:56 -0400 (0:00:00.022) 0:00:02.924 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__nbde_server_cachedir": "/var/cache/tang",
        "__nbde_server_group": "tang",
        "__nbde_server_keydir": "/var/db/tang",
        "__nbde_server_keygen": "/usr/libexec/tangd-keygen",
        "__nbde_server_packages": [
            "tang"
        ],
        "__nbde_server_services": [
            "tangd.socket"
        ],
        "__nbde_server_update": "/usr/libexec/tangd-update",
        "__nbde_server_user": "tang"
    },
    "ansible_included_var_files": [
"/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 02 November 2024 08:48:56 -0400 (0:00:00.023) 0:00:02.948 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 02 November 2024 08:48:56 -0400 (0:00:00.022) 0:00:02.970 ***** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 02 November 2024 08:48:58 -0400 (0:00:01.260) 0:00:04.231 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 02 November 2024 08:48:58 -0400 (0:00:00.036) 0:00:04.268 ***** changed: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, "changed": true, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 02 November 2024 08:48:58 -0400 (0:00:00.481) 0:00:04.749 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 02 November 2024 08:48:58 -0400 (0:00:00.043) 0:00:04.793 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 02 November 2024 08:48:58 -0400 (0:00:00.030) 0:00:04.823 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 02 November 2024 08:48:58 -0400 (0:00:00.035) 0:00:04.859 ***** ok: [managed-node3] => { 
"changed": false, "stat": { "atime": 1730551734.688197, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1730551735.5871959, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 532676801, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1730551735.5871959, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "2485065829", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 02 November 2024 08:48:59 -0400 (0:00:00.345) 0:00:05.204 ***** ok: [managed-node3] => { "changed": false, "examined": 1, "files": [ { "atime": 1730551735.5751958, "ctime": 1730551735.5881958, "dev": 51714, "gid": 0, "gr_name": "root", "inode": 536871107, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mode": "0664", "mtime": 1730551735.2071962, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "wgrp": true, "woth": false, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } ], "matched": 1, "skipped_paths": {} } MSG: All paths examined TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 02 November 2024 08:48:59 -0400 (0:00:00.530) 0:00:05.735 ***** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0775", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 28, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 02 November 2024 08:49:00 -0400 (0:00:00.393) 0:00:06.129 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int != 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 02 November 2024 08:49:00 -0400 (0:00:00.049) 0:00:06.178 ***** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": false }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 02 November 2024 08:49:00 -0400 (0:00:00.032) 0:00:06.211 ***** skipping: [managed-node3] => { "changed": false, 
"false_condition": "nbde_server_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 02 November 2024 08:49:00 -0400 (0:00:00.046) 0:00:06.257 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__nbde_server_port_changed | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 02 November 2024 08:49:00 -0400 (0:00:00.046) 0:00:06.304 ***** ok: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": false, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-11-02 08:48:20 EDT", "ActiveEnterTimestampMonotonic": "300282438", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket system.slice sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-11-02 08:48:20 EDT", "AssertTimestampMonotonic": "300250725", "Backlog": "2147483647", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "9965000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-11-02 08:48:20 EDT", "ConditionTimestampMonotonic": "300250722", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/tangd.socket", "ControlGroupId": "4482", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698376704", "EffectiveMemoryMax": "3698376704", "EffectiveTasksMax": "22336", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang 
; ignore_errors=yes ; start_time=[Sat 2024-11-02 08:48:20 EDT] ; stop_time=[Sat 2024-11-02 08:48:20 EDT] ; pid=7346 ; code=exited ; status=0 }", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-11-02 08:48:20 EDT", "InactiveExitTimestampMonotonic": "300252693", "InvocationID": "9c19475b3a3f4d7bbe0045d3ec667fd4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13960", "LimitNPROCSoft": "13960", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13960", "LimitSIGPENDINGSoft": "13960", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:80 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "64", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "3274592256", "MemoryCurrent": "8192", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "1134592", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassFileDescriptorsToExec": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "PollLimitBurst": "150", "PollLimitIntervalUSec": "2s", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "system.slice sysinit.target", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-11-02 08:48:20 EDT", "StateChangeTimestampMonotonic": "300282438", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "listening", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22336", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir] ************************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:33 
Saturday 02 November 2024 08:49:01 -0400 (0:00:01.367) 0:00:07.672 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1730551739.7111914, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1730551735.5871959, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 532676801, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1730551735.5871959, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "2485065829", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [Check custom file] ******************************************************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:39 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.381) 0:00:08.053 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1730551741.6001894, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2", "ctime": 1730551735.5881958, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 536871107, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0664", "mtime": 1730551735.2071962, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "709332064", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify role reported no changes] ***************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:45 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.379) 0:00:08.433 ***** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Run the role with a custom port] ***************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:49 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.022) 0:00:08.455 ***** included: fedora.linux_system_roles.nbde_server for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.024) 0:00:08.480 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.015) 0:00:08.496 ***** skipping: [managed-node3] => { 
"changed": false, "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.032) 0:00:08.528 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.019) 0:00:08.547 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.017) 0:00:08.565 ***** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ "tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [ "/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.023) 0:00:08.588 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 02 November 2024 08:49:02 -0400 (0:00:00.020) 0:00:08.608 ***** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 02 November 2024 08:49:03 -0400 (0:00:01.071) 0:00:09.679 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 02 November 2024 08:49:03 -0400 (0:00:00.028) 0:00:09.708 ***** ok: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", 
"update": "/usr/libexec/tangd-update" }, "changed": false, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.346) 0:00:10.054 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.029) 0:00:10.084 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.024) 0:00:10.109 ***** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node3 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.105) 0:00:10.214 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.036) 0:00:10.251 ***** ok: [managed-node3] TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.530) 0:00:10.781 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 02 November 2024 08:49:04 -0400 (0:00:00.045) 0:00:10.827 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 02 November 2024 08:49:05 -0400 (0:00:00.358) 0:00:11.185 ***** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: 
/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 02 November 2024 08:49:05 -0400 (0:00:00.023) 0:00:11.208 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 02 November 2024 08:49:05 -0400 (0:00:00.332) 0:00:11.540 ***** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 02 November 2024 08:49:05 -0400 (0:00:00.023) 0:00:11.563 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 02 November 2024 08:49:05 -0400 (0:00:00.017) 0:00:11.580 ***** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 02 November 2024 08:49:06 -0400 (0:00:01.038) 0:00:12.619 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 02 November 2024 08:49:06 -0400 (0:00:00.018) 0:00:12.638 ***** changed: [managed-node3] => { "changed": true, "rc": 0, "results": [ "Installed: policycoreutils-python-utils-3.7-2.el10.noarch" ] } lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Saturday 02 November 2024 08:49:08 -0400 (0:00:01.431) 0:00:14.069 ***** skipping: [managed-node3] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Saturday 02 November 2024 08:49:08 -0400 (0:00:00.033) 0:00:14.102 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Saturday 02 November 2024 08:49:08 -0400 (0:00:00.032) 0:00:14.135 ***** skipping: 
[managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Saturday 02 November 2024 08:49:08 -0400 (0:00:00.031) 0:00:14.166 ***** ok: [managed-node3] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.882) 0:00:15.048 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.013) 0:00:15.062 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.012) 0:00:15.075 ***** ok: [managed-node3] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.038) 0:00:15.113 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.012) 0:00:15.126 ***** skipping: [managed-node3] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.012) 0:00:15.139 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.029) 0:00:15.169 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Saturday 02 November 2024 
08:49:09 -0400 (0:00:00.029) 0:00:15.198 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.029) 0:00:15.227 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.029) 0:00:15.256 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.028) 0:00:15.285 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.011) 0:00:15.297 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Saturday 02 November 2024 08:49:09 -0400 (0:00:00.012) 0:00:15.310 ***** changed: [managed-node3] => (item={'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": 7500, "proto": "tcp", "setype": "tangd_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "7500" ], "proto": "tcp", "setype": "tangd_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Saturday 02 November 2024 08:49:11 -0400 (0:00:02.637) 0:00:17.948 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Saturday 02 November 2024 08:49:11 -0400 (0:00:00.013) 0:00:17.961 ***** ok: [managed-node3] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:98d1c8c12a0f58b637b3864dec2f3d03281e88e541f41a5e19563a5447ba020c", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:bf97d40f7ec7e4318b7d31bfd664735627b76b6e616a0cff06c15040960620ec", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:da4a2c785c040f6c34a3e602eeb1ed612a74ac170b7f5089d8ba55e12f750818", "enabled": 1 } }, "afs": { "100": { "checksum": 
"sha256:fb7e253fdbe3d08b3929bb97c3b6d52f56123bd4759aa1008c667abd2021b2aa", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:86bb7a2815cf9d64e9efd2c917153b238cbc60d7d03abac54f428bf676e4bdb6", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:836a35abbbf400c117eae70a77a4596a73b3d548e6fa82e3a0f55ae96f21d64b", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:4f387a45821ab709a325b7ccd4af3a4951c231c34440b5fa877b8fc6387d2aff", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:6e3edee489b6c939e15c49b4819463a7021d6188eaff9b05e9c8867de1f018fd", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:9de454604bdef8775f6ebae39b8bc4ba7b5e79a2972d3e32ca7d50abffa7a85a", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:133a6a6da4b4ab3b13dd255bd7974bb6e0494fa7c5e0b1034b7926823d6621a2", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:80ccb9e85608a483fa07c37c2772933f5e63025bcc667540bec377b02bde2f3c", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:409af50bd5708e662302020ddef0f314e0da72c996c670db45aea57478e8ee9a", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:f1ed2e1ec9355b898716cc1ed8f18d7983b45b5bb88c2624fe31ffc968dcd6ca", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:3239b0ebdb98df15e7140b948a2c741b36a870ca02ba8b425ab0de044e7b7dff", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:47a5360325c4aebd9de805b5113bc715736bb01801cbbf317ac2112d1bae15c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:757508a02ab22f3ad35ce7bc9e596f7c84e62b7744db341f9d4f63ef74ee92ff", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:c5b9c6d5d27d5c119598c1a040e9f488ac1d783c497b13b9c1cce9cafdfdf6a1", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:a8786979d69981a916a89856cce9aadaa540fe2baf93053bf9274eac36cc38fb", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:f113821fcc3ce3e5e04130937924e8aeb5741935d8b5422af25ef8c4540cedbd", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:05a7fc8acec5e707257376e4ff8fd8a02de27aa9c616c95c0e8ea9239841eae1", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:42a66af256fa45dd7be3e09166aaa0f2a035f5670f39466a2d4ee84e417fd76d", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:caab2da7e513ebcafb2f4829626763ff58f74cbbf7c14b31e1aa1780afdc114a", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:3196fce29123f8eeecd6175b098063735d77aecfaffa7f6cb641adbcbd9ee977", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:9bfb194e5b283124abae90dc92abb883e1cf7b3e2f51bcb9fcf842672c86ac60", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:ccb0373eb1228e3a34112ba0124b37cee80def8c7ddcc0981b0676aa59bba2f8", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:4ee194dcd976488e6eb0e0ba84d9b6875befc4eca8389c9d0102cac06c04809f", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:3729db2162d149b6905af0db7ae998f7726c3733308d4f83c7ecd02c19c88151", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:3e147aa1e329d87cdfd30b20b3b80ee0c039e9665e944dc6a28231ebac38d5d4", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:d7f26a245b0780cda9ed49b1913ebf10733ac091a1312cb58b55f4331c50b9c8", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:9324bebe4e0cac756830480f53dd8e23a01f3c4c7185b30dd72024394dec2783", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:fd38032cb8b3e9c24024364c02b07a830e97432cd763f41e82612fd83de06654", "enabled": 1 } }, "blueman": { "100": { "checksum": 
"sha256:2150b8a1afae75f4d73c76e2982ccc575f599577b8b7042d7bfa87fe2fed3a30", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:14baa78a99f3fdac7ec18322ab578c030cde924b3c8716cb719ca52e0b67df2e", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:1038d007468960019e0957df9b9dd0a76308b2dd5d3942dcef18c937a5130fa6", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:e22f75dc547d85b02fdd0f2d4f20f69a1ca9b969ed6f9b9abf957fa02e6e1b7f", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:4cc85869cdff4e9ca3abf6ad71d8d96cf5bddc77a0a0775430615e0c8c2a09ba", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dabf91939aac32788c03a2c0b658d6e951cf80c3b5337e297f6c53bbe3f4418e", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:b0bda821f499f688d6d1d560a55673989cef71e7cd4e20f9cf3c9ce45e91d369", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:78276818c60a76e84a9b3263a906d7109a69028c557a2d6a4a39ded2ef713a44", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:0230419d38276d2c823a427d44fa1362f89784c7ac17f76826be95d3c8e6de1b", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:8e4fd93cbe0357fc894c7898adf9a14f8c845d87a08f673f410b702f798c2383", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:3d6e2bb58a928e53f4d89d5bdbdb6fea3f9ae98e5be81548df29fdb13b204f9b", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:d595466a7aa404ade4ad75d45e10f6a37d30529858cf990407f545f7826b364c", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:4c65dae6d30fea1bebc4e63ff693b2041f18d0adb710b783002c98c529be84d8", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:cede7c726cd48a09e9b2974e9f78d9482cc9d01a4ceb74ff190c4a219900f2b3", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:d523a4d1579256bb5faf1f761ac6a22cb4e8bb8e5544a5843615d4927abb5e76", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:496749576c65d4a03024ee5d3be0814965308561f565ec6cd29587c3a3b7ec57", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:ba7fd7c8a2a4d0fc658dd4aeb27b1c9c2049e7b8b5b5c3afa9523c74f6aae263", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:f9a4c97defbe94abd597286e65015baa5cdbc494a6404f3e467ba2484fd753d8", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:0e7e768c5c69903387f189899e532efc9055539b7d8f1a9b7e2cc6656675a1ce", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:c875a519672af0d8291b9097f547dc940b9f0132a174d49cb4a332e08fe92b89", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:8fc798d839b2d5a643625812c65fd6859b386328ef9b685f279082a38baa1b24", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:a5e19681d20486653fb3395b323b30b77e9c64e99ac70668ebcbaad76b8fac66", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:e3576026c0b29d1f662172186e3e3ede0b3b2ed0e4e59a863d00bc75756724b6", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:28d17217c3c515dbb1c0b04d5dc36d50c7f3165144a9ba56b3adce749ceb8467", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:689032aad1e66934c35a72d87b428787df267d8ba743de741bc7fc4cec47c5ad", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:1bca5805677f6de8372027eab3e7b091d28d9098bf5c363090b12f4195051441", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:9037f8704235ce4b481935dbe9f81fb7eb065e593e07988b869938948b15f432", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:62a84539a2764eb9eea9d29b86128f1deea9adb3a3a534c88f5fcdd7637b8575", "enabled": 1 } }, "clogd": { "100": { 
"checksum": "sha256:f79fddbcdd23396829c104b213648ffd5d5cac6dec8adf2f5f0efa2f7061b615", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:e985f49df5d3f27855f9c727f19138103110d987ed0f9ed8081e39a39aeb813b", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:2c94298413c5d1fdb88ca748b4c597d2b3e543ed82604378a6ee333276dcaab4", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:7fc7afc3aec313b51b6508fe564813eccb4ce5bc1d650ba9f7eb61a438cee86f", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:e574da01bdbc51b1b08385713fa8c9780df9aa657ddab4cd93211e4e9c8b6ff0", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:1887fa9a04336ceeeb73c20f3f7f6d6e6994f2a4b1cff802ca64e142ea1bff05", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:38a4f293f3f8cdb18203c347a11304f161264aaf794ba6da418804f090c0e7a3", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:e8d56cababfbca2569c42055e8fac5782d6ae8b42ba2b236b9563f7eb6c2ef4c", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:e172ba28d687e96a2ffc463bffbf202ce44e7bff6e0685adc4b89a67cdcdfd40", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:794a65c72b01138ecd19d9e679f62129d33183f9b1324b064c16301fa79d53d4", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:e585da6eefaff112870deda0ced61b75f1a5b120b7bc2133c8b1ec9452695646", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:699c69e48f1ec1dd8f915f094a83a6e2f6b6fdd515233fd8867649f254509270", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:743992902ac8514de4dd54b16317a8edb9cb1e383e29d0450821a9ac304059db", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:3bb1b730a3a6f236b312c3f71172364911992d35e93c1e5cd1e1fb076eb2d3eb", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:969a757fa353d20473f2eab457378e50d6f32b1da19f6744f7f20d5f3f99744e", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:0b0bd5618356e7da7018c86dc262758bb6f04e7618de2942995ad59e897c3244", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:211dc5348ec5f69d5d76aa3d07c74624bb39cd2aa082262e5bc4c6c165f677e7", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:0314fc16b04ba21878c63ca1d439187d19d998e4e0fd5b420c50a7ab736b81df", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:aadb573940c57a51a13c72bfa7a074e440d7b6b3d2440c3a435c1b41742d2b3a", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:4855d463c5142f1f001a028136bb737a837c6a0d87807c0b97b1eb23b897a05f", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:b188990c62c036cf96deb2fd121236499dcb5cff830e38ac24b74d4692c0323d", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:ffe9e6e4bb673f6822a874b86eb21769ef0f5fdbfd2e2659485abc4c6215ffa2", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:8b077e1368da1daaa96272b0d6f22f08bcb264617e6884c9074423a9b066912d", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:c9631ff4c56b588338bd87c7d8cba95e78285ecfc4f12653ea5c888d2fee8af0", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:96b16cfa7d12cd4f994a84199fb20016a24fe5bbbf6888487ef4fc0b0f7008dd", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:5c3f9427b9dcfa7aa76a67db7c3f9a33ca299916a5608cd985fcb5f30fcc405a", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:3022c933d9c2baf7e8f7520210ab2776c889d292a96bf56ef1da0ef398e93a93", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:a30fe6e8e452f73079c37dcc1dac02ce317ba2310f8af8b6fc08b623adc89be1", "enabled": 1 } }, "ddclient": { 
"100": { "checksum": "sha256:eee9cdfaed63e516667ca9f76565fcc909c173c6440bfd19dca5c12fca61692d", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:7a239615a4db634ae1ae8564acc54c55c86c68bd2e44d47ad9c24758d492b22c", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:17ec39ae4334df1f47531199b3f12954bc848810d1eccaadc0463174aad953f4", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:15aac55748ee10a93ba0547d3c20670148e01d14b74afae89df0e660e3396dc3", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:9f5a80130d50d12f0ab8a8d48f9ccc2b058fffc0b5704d3a6d3d44787b01f4c1", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:d6baf0237cffb9acc28f3e883ccc3231165c75927427be64539ee5cf0a13fed9", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:e7ec5090958fbde915f221e49396a45b968b8a2d6429d74000ffefa8b6e87e3f", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:19cf6b1658e2a7ff92db6c537944a989ea72e88173663ba4589f71132a80be61", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:55423cc64cb3aeccc3e05952327db479669ddd5f5e98bcbc275859d38333e79b", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:18f7d99ff73fb7c2256ac3d86d4328098274cf2b8fa92c8b42da37867f6c7b31", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:1584e02889b2969514ddfcdc5fa31152ba8505c66c1b5f88e75b3bf23f96bec3", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:57d7924556ab25fb130b93002b1da35e886ef1ad01c4391266cedcc01848ea8b", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b96580e08581043a5e5fe37b67ab937e074e4218d685b23df6f9421ca49d5248", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:90cdf514efbb5bf01f1e2b90d7d58eed2e2b458242905b5cfa32e1de01d71da3", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:ca075b4e9b9aee4bb5946407a418fc229a1c6fe4c7abc9487b9ba9770692c78d", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:413b889a36fb01d9d3ee8fe149aa2d11702e5e44afceef42a0d7450a421cc89c", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:bfd591b0e99fc96cdd443d01c045583e45fc0ba1e8d6b7a18f2596473fe11ac9", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:8e2cebfc7249a86f6b40690946f6281f83a343eeef929d7e1588bc77622004fa", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:af6e6cb692c95d34835b90b4a1c449a95445710e8fac4ca0d999f3cc85540e17", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:d6f5f286ed1d5705e62598f143c0150deb59d708d7dd1ad89bdc6d68f59ed853", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:3acaf8ff26c8d81010cc1ccf88872df39199279fa44d85b06903c97f50def08c", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:17aedac05f634eb036f29d301d8b0416cdae5f3967f88c0286761e209858f07e", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:64801f8ccd6f426ce8b4cc674e4288e0424a745b6a882102d7f70fae01e438aa", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:18bfb3bc0946b731f05e969de50189403372c76fab801f83e2687a9d43b86622", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:4c9915a1c1cf1e67ea429fab342c29f2caa911f5d615b86f9eb8da496d7b5d54", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:ff74bb68f3369f30d2ae98cfce6acecbd6b7adbad46a18cc03bba25037a26beb", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:38cefd0e603535dcb11f9291f4948f059693fa7371f3ee92654cd3176a5195a5", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:3f2c6b1336ed28a0a7d90f283e9289a5f83e5ceb71d11b7814fdc65ae529314a", "enabled": 1 } }, "fstools": 
{ "100": { "checksum": "sha256:cc5947167cf3dc0d821c1f146d6253d72a3016b1aa8b5a4b4ec3f82d1e6631c8", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:22e96b5040327ea8554d0216ef4e28d6a94abdabfe3a211a6535941bebebd013", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:3da67a17b85cde7a0f181e6599726357fb2eeeb251fdd980a9d6669772415922", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:57d4e4d5d9ff252fa8703355ed68c854817ac762c6120ba02364cc19c67491b1", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:f41fda08cf3bd33ac571a4ca5d6d505a6dc7062c29e45b1d5d160c953c532230", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:beec56305b71ccb0174a7c790eb972244d10b8381a84812f01021b9e8eb7ad99", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a6fac65e97c9962f1066c0a2fc839e67597cda9061557affb60a01df78184a4a", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:9f2c83dc9cafe17c8d650f8c78c39c61bd3d2c8b4d77a535eb61ce6a04a3e3f4", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b7decc3fbc86e4b9e3fe1fec22021737ad607cf795224b5d5793069716a7eda8", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:097520793eaae6b878f14f2b767d40a80007786065abf1b252d898a685957fc5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:9fa405b48cc837c307845447cc5444caf258bd0bd7f00bb6ba6c9bc623d7b15a", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:1eeba013ebac8d310e67170a313c2d11669eca7ba4aaef13e330e5d92f17f94c", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:32cd19e48502d4fd69421e291d757f6f3f715696bbbaf99dcfe3f567743ebc86", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:7c8bb6410fac1ef1d07e3c7c60906737c8e22176231b24207ea6bf0db41951b7", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:f3799d846d2d0bdc1e1d3d075565002b1cda87f798b852691135012b5f545d1b", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:c38a1e2a1cb3c1a8d400f506cc9f3f6dd15d78234f982a04bf4ad8b4f6308937", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a7d4cfe7f3250d86a409969323f858b4dca8b40339125e15946a133884b47e7e", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:c0f9f2a0be3fe24b4f74acf4d1924183310b454591b8e8219fccc61c2194f167", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:82815af35d160bae556a7c6b8862da02b469fbb90ffd5472a6b22c18bf0f1fec", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:ea0ff5fc3de7fc7a93d4b64c0bc54a4040d9427c9eaf8200b97388166cc0dce6", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:17a1021557fb77a340af451c33cf80f05ab309a719778d84813d8ab67cd18f6f", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:201698320ad8f5f35391ea84f0aad40c26ba8db2fe006b38c7b65b8dc71f45b7", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:a5454f4868d22b9ae798ee6f85df6d8b6b35b456617a02084af47d393322ab76", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c89e58ec9c52039bc7ddfd048fa5b287780a48f78e70ce64181607ad33da6c47", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:b7ab0f90999e86d8fff9de3abeca0f093f2bf9f73330d434e947fdb5bfcc63b3", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:bd8eccb4266130eddfae34318855e6aa3a8624998f0a906a970a04802510ad86", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:3199646b83c13466308cbe243d6c75f828875ab0dfca1deb585dcd7bfb28ae60", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:a5d5908ca11b9ffb012615c1e4fe77cfbcf1a429daccb97abc614c97d3fb2377", "enabled": 1 } }, "inetd": { "100": { 
"checksum": "sha256:0e2a80d0943b3fea8756b2d487038182ed624dd36e9177305fafbc1ca0374af3", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:1144dbfa7278341caadfae995aeadd7453cf78762b08ba4d0142eeef80871d18", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:d60fd47ec62ca3bca272ca38d8fe86fce36d6f04c44d01410d30428c8b914861", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:c4e83821f0150988e86e714cf9e088ef11abd0a30dada90cb7e68a95c406a921", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:6185225b7c5910d94396a18d93429704434d564b4478aeea9d1a8c54f6b12834", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:7db93e52e29915a08c82b0ebac02856e9a484923d2ce215946c8f54cf5297704", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:ea599044fe2e2d25c8078ac0b5b0d2c589b4410dd7958eefdc0217f8b8c8c3d0", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:048f5c3f5f3e706697d782c57b9d6cae882b008f7973b41edb3de37762e4c664", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:e315834c2a287c9b9f8d31a6fb5f62360241fab584b68ce05984035ff67f01dc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:1ed937287398d60e302478f4880a25c1aef605a5d370c1f583426c59067d8ed5", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:12e969265cff2e8d4ee9be8b99da0bd47138ff8754039dd1387a14b2a4a03b79", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:6666da694f33e444225327d9fc58208b230a1e1244e05335e16217c507ac0627", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:85a313d121f764081dc92c4c1ed5773b8cb7a7113b84b66339a48f76c57efe54", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:08288d791f1b1aa3a8f154ae7b2ff73d574560bf5bb82246020baa3c26f6c988", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:6aea2d07d8f040434ff0c0d5e9b81e825b1de56cd01c0e0a069ee1f0d5196885", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:f35d7f2d61175ba7f908459f738c7d6b6adf2ae678d88531fa527c4d50b21985", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:1096c205a0bf2f84f1b2e52698a681f464f6ffef74145e6edfb81d57ad910913", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:5a35385072b421cdb86fc66190dc00e6c8e7a587a60ffeedfe2a931b6b92a11d", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:370090682ef9410c2d12556dfd1cb6f8f7f7ab60287148ead03b3b11d64146e0", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:c6d5f08d82062fe0634f0dcc9ee434fcea7760141740b79cbae187d970984a0c", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:6fa6bedd26d8e4b178a2d74158634f26fc2b322782e4685d69ca48f4854e037b", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:994e8629c436a7da2ed914f7499e0a33f7cc2737b5c70751b280d73de8a9fd19", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:4eae5ab2b926695b3ed881a26e7b6a79f4dd48c1ffa3e3c6809cfa4958b54091", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:6d9a8c7420b3aee2c752119ef6d72dc828cd76ea4f535bdcd44461a3cbb43454", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:fa398b6a78348b789660ea073244265b9bd8742fd81dd3569c9d56b1b9083a00", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:30ba7e91c6fe3cff8ac488e6160a5037fb8f1719275991ec44e5bfcbd9089aca", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:f84aad2ecda576c41ff72438aaa394122fe6f13d274adb17a2cf145718776969", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:16b2c313e69ba9a2726025ee54bbfb638c633f91d53db454e204adef45e5b5e1", "enabled": 1 } }, "ksmtuned": { "100": { 
"checksum": "sha256:328ce002901ec9be77a5035fe50b01b9e01b016128efb981a7fb0aa586b55a77", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:311f6dd8403d55da5f3173593f5fa6331d4cf7570af2b9d0182f7b260ba7285e", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:cc0faedcac2d86171aeb9a89ec83f2d03cb905f54296f6f085415fcf7ddd1a06", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f1a7908800f08a04aade8083ad79d002aba0c85f1699f35ded5a0eaf46dbc41a", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:91a8d1cb1ea2e44b9395cb7a44776e7992983378fbc72dc3725d0e03fa673a40", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:c83754418ce993f0d9dff4ad73f0e64362ceb3ed073a093dcdaec0ac6d8c8b5a", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:6a6b3d623e27e1e8a1ce185ba3ca42d3c7fb5dcad8cc91ab54950a1f69853dd7", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:564e3801937503b310cd4fef750cea8bd1be7ce323cf41276cfb0eedb0fbdc8c", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:9070ba1f03a04a5906bafb1399a0eb181dfdeab868dfb021505c116c61decbe3", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:34dac23f45fce0951b4c1cb9a21465be98eaa5a5f2f0641a6254e0dc45236689", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:ee0cf6f2b19c24b7bd76829de7de00b60f67ae9d96f2ebcb8eb09b961b6dd751", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:56bf0d727637f1964d5f71a7ccc5edba76cfd50d2e546e9558d0551ffc968858", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:3cd6822d612f355cc8000823f47b7ab0087035b59b57904c145db627cc58d22c", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:6b69c0a0528321f39311d34a94e750c5cfffd9bbcc00dd3c06f445f2a7bad660", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:274d379294253625ca276e6463b80d3b3fa88deabfacb4b4472f8f046119dae6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:5ad554f859ea8313b48b7592cb1d1c3e13884199c9d9f939a99bb0651cadc5bf", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:987ac97d3851ce51c2aed8c8b08c6dfbaf722ca65447097c4531644d458637bc", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:fe9e6623bfd94678b32ba9a20c3e04469aa69a5175a1f626368c1fcacf7c5026", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:3979d77408c282fc3abfe32908df4889d7ff0ee8096ccb68a6a91d44bebfa56a", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:b216fc3f1025723b5bf96002b39dcb9ae8c80c6d6708dc2db348f419af025a45", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:e67d95db5bc472d9d8a819db5c947190543bb6eb971955dde57e5249655033e1", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:0abc78cfd35d9e43c0c21983a250f874ee7ca7c10d7eb1c6e0d3106f538cf482", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:b76bb64b5c5dd0db6346ae7ba19faed97ed414c7d03d7b8747e0919867a7b8c9", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:3269314b5d8bbfc843900344d65e604432222bebc7e2dd628abb9d186fca1cd1", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:c5c6c2cfc4930bb865330ac4bf1e04976290772b0e058cce0d73de071ccf7819", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:7a137918a31fc7684a8ae929a5046517be97399b2534a6b471e3cf3bd7f355b3", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:4fb79885af1e12d162aaea04067c9dbd19c248b8bc6bb4fa91240acb6986c84c", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:10e59adbf0210a83d2506cdcc189bed58fbb49f01ecc07afe00a5fc06a0f025c", "enabled": 1 } }, "milter": { "100": { 
"checksum": "sha256:686b92d2ed35a14042a3d6d95af167aa0b1bdb0bb1d1894909c3d4b5ad70ca34", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:6b200b3b9aafc62c019eba4121e1743ef9670896b70f8b5201caa19b975d6964", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:33b06281053cbe0c55752bfcda928a48a50743eb2d061d429b07ec3b0dc28a04", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:1c657a9f8eeacab2a91a9725d7c5980624ab90e0e5c67c2f9270df3b0ebc9fca", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:5b9141bfd8861fae68d1054e5c15d503815c2191e68348405d5f8f2e53757b5f", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:4d0081333a7d5871e89e8955ab640c3374bf2bde197557cb6a83e122aee9137e", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:a03da1b4a0188f7ed4057cc37b734745624b265fdad71ed6c2d00467b72a4846", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:808b6b6f4a5860ba20f75d8b6992bd5284a90269a98dccdb38205ff67d253dfa", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:03fa2b1f672c7902f9c88bc3048c996ed5189bf6a965ffbae30b2cc555662aa6", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:ce4c112a710ad3b571ab733fbf785a6ac7b9eeb9c29f3ced5f994965e9386ab4", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:db72ccb5d6952aecb9188c455e70a0b8c6feb25946b44dd0884942e74074a65a", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:a5753efd9c79243f87ee59feb5c3914e9c6a435f70a053ff59657baa91f0c8a4", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:13c108e4971acbc506790783af8287ab766941ef3745594a69e020dfa769e75d", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:865539a39bf40f78356e8b2cbb08b24926913db3a08fa9796e4c2543882aeb25", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:1868c9ea9e81cd4e82ae9373693db84070213c92d32cb623389b1512c24f2e86", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:2b0acf523137d68497ea06f19b5494cb5c7ad6e4102fd7c626b9b74fef062f19", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:31d2bd89013c8c5e58a1e2e1b393c3db729cfb92080c1935f01cb63da15920e4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:3d4452b8e5b08d56b16c141515f2169809ab160544da9164cd91c3872fa5ca59", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6757ab3ee1b84340b5a58935d090b926ffc96f43d03c9cb243802b4d01d5a29b", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:8dd4e90b95311cc5dae5b11733cf9ff8be46637d84c5aec28ae62a0b11dc452d", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:403d5e09c6c84eee2d3c80a1c50502d8850469e5c9d74347aacc1c8f7496861e", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:2d19d6dc11b839551e1a98aa95588c06a3e1dd84dcdfe615bf61f26dd9ef31a1", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:f260b769c9d70fe26cddea75c71e1c3d16348233c40a3b8a844358d138c19d2f", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:9c21b6bd9db730c7b79a4852236f92228282eb2dd06a2ff722b36acf70553386", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:2681295ba94471abc9d7d49b6da5d757409c6c95745586d1671a4765ed61b130", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:cc65424c4edcef752cf3d9223a0a49d84f7250bbc9c42d08d0b5727e0168dff6", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:9795f4fc6fa6e09a6976b87c80aab11801ae7b43d4f0df1b603b8799f4ff0115", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:229788201a8048a7a80661a258cabee570ce65b4c5fef318a86f8ba7f027975d", "enabled": 1 } }, "netutils": { "100": { 
"checksum": "sha256:e25a8a3a44a86e24a136028a71f8c242b6eaa7ef86da8fe34d010de2f027616e", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:7fece44e7188f93aa358d386ae81372812b47a046f10ce081358af85c4c0f229", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:13dcb24aa31902b75b5416a1f2d3dbba243445fa8ae1581ea41029ca21cdbc60", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:51dded020c93ce723bb8cd739359983f6ed82b5d70e93980d2fd095aaadcc168", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:000c5853f5261b435c8a0362720ce04a1ed300aed5f22dfee9659e8a024466a0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:9fb6cd0d5e35bcfcd051889e81e2834b4b50861455a0d8ee30920df919d3067a", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:c940ff7748160326587bf9084d5b7906d501ecf9576707bc19f20ee759023442", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:a1af29229de29bfe64bde5357ea9e2a47d5b82f8329f62be4ddbc184fb8ead22", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:20d8e86dcf15719ed0b481b0f5cd521b2ada532e712c6e1cb9f89e1736466ea0", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:74dfbd9db80b05cc69c090005f02ab089695f21c0e0b70c2a7d8300247624e1e", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:f56cd3e22cc4e864d6cb5822812de4c12884f1a31f3922caa45ce0b61f8f86e2", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:36ca1c4edb066c2090e5ec5e08f71050424e5efdc74c440e9a0161c7ccf0bd49", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:b0afd194fe6e16dc0fbd976301321d6e68a9cd837ecdda2c3cb739c07c4757b9", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:dd19058574329f7c7ce709bc94f6c4be87028cdd184cc365a61d5c0113b78bdf", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:649c497ab74a203064009d553b42829aac89ac5c4273b7c0ccc0a23530001fcc", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:4c0624c2e3095eb392ee93e8cf98a2242598b7694de12ae75ceffdc0043d1043", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:233a9390d9aeb540956f43756b2e45547fb7c2c4d4070460b94896b72ccd607d", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:37ac5651f2ce2bfda7d898a024d560c6cdcd54da32409ced32c8b6df059370c3", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:9890fb3013a4287e6850621c5c6a462a254299b927609fbe6bf67f141862f3c7", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:75fa83f68fa800bef996da027f14f6c8f0ded93f93569f3b559495dba39a6176", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:dfbf83e8feccc45bd9de7bd5a3f90edd49c3190b5010f9f98457d995ae974f8c", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:1b7ff4ebc979d45777979c0340790dcf10a3e9353272c2e99f6f4f4f93987020", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:e8e6bb283142b08b3a1ce7c097f49f402bb6d59eb6d03138ef0b69f3579e466c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c1bc06d4e5a22837586d3d7ee07922f26b4cd025687cdfd7e3e3789e1c5bdf3d", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:800615bda29f1b49c90de283570ccb018a28ddfff34a36bfe84a4d0485c37138", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:0bb8334afe603f02ef4f1a46517f970bb30fb191e1d9d944daee33437ae89b46", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c6d157e74586892234883ad01bb4d2e386637cdd04e45c3587a1cbc8c34e35f9", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:802090ff2dcc2db35b369ac359e51b1418a5a779cf94e5a14a01a1882b583d64", "enabled": 1 } }, "osad": { "100": { 
"checksum": "sha256:d3b5dec9cf6640c48875229fcdadc4f9c4bfa1088bff93cb61a18360197a691e", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:23da314c2255bb1c19dad65a242681330d6bdddc2ba4a0b80ae2ad2249c0a52b", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:75e644cc79f5447d17ed5461e52b37f29dc86806320b0ef562b2acd8e2870121", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:e6c118ffde5286b1f9013a26f857d4d85dff46d282e4a7ae6548b84e270d7c7b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:6d1d142817f6c7013fa94819eaae391f98b69357bfb352584c1ef104d93aa08a", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:3f0071ca72c2269c6568f2695aea954cbb6a8504efa48a8ed1e94e2c0baac106", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:9bd33415ca0bd34a05461719de21697d486512341069567c26b25cd6a7b06c00", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:5dcb629d0e7578776fb9476a8b66ff883fe52da8be37e3b1f7a3bd4810354511", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:0312d08cfc7b9ad408ce3d34a27b27c64c62e1b00d280c484616927a859aeb47", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:597ab5f0f472ea2f2aca1b3754c2457cc7769f79daaec0856ad2675e53867b80", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:877bbc42240491f32726ac44a082942e80724e3bd3985c64845dee49a060a144", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:4e9b8ae11387a45c9266a82f79f31728bce6397b8e7f8d39287b78dd58ddfd96", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:81dc1bee85071c65da6d7e4dd12f390fdaeae5d7a285c132c952290152f90c0b", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:184ad49c7d752eae216e8e7313fc50952515caf2d4fe77b1315204a436979cbc", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:611152fa77dc465824018a6b6e2687f2a1e483f92e20d9754c33f8032d7a210d", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:e73478b5cf88be80253fc65ed0f397e9078272320ce939b471230ade52207983", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:ac303c7ac224ff7fec004f7db6bdc6cabc5e9fbf1df3e27ab454f3f87de18b8d", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:f651b9a5f05bc1884e58553525034b9fa376084bc3eee5ecd5cb7b70c42a55aa", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:e30571ba606507dd4439ea7b455a115170923229f105614f0b044ed07a8ba832", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:15e3c27ffaa1d13d785823bd82e53c398e0b5c96c4e0efce502137b326f3710e", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:b1795d2f89c94a43339f1b659f29173491eed1d079a21cc290fb53afb74efd1f", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:abe82997f5573e9804f5b66de9f1b9120ccfd8ad1e3c0c3afd32a99f043a49ed", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:dd827cd694ba2389e77fee7137a741677590dc56dff0015fa6b58b93ab354c7c", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:698a04231cb1ca9a1a657c942f22b26ad57e06af78dbce2006dd9a7991d01b19", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:640c55f9dc7e32054fc5dcb34e4080a848526a35cbc4225b32deec213e9906e4", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:db21e33d0ffe48bd93a10808ea536532acad2578c499333de859afca9b4bfd5f", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:2592c97f035c97e88768678244baa38c031afeb5e94730133a0a59e7b451aa57", "enabled": 1 } }, "privoxy": { "100": { 
"checksum": "sha256:2f9555f9f047d752841d98608cd372c9f9743fdaa88a59c96a0b2e85f9016b00", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ee1a9203e7dd07d84cfa3c20a2e069bb0552dc430a6d2feed2919e53731b1864", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:360e32fc4cb037faf2b5684ce2d376c42cff14785fc95d82fd750fe66dabe1e2", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:9450644b2b34bbc91aa8d0c40b88fd2363137df218d38ccc6b28b94b63286d54", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:861c6b803141841f4756be8531775c7d37e8ec378b2b9e90f37e1932d35e5e36", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:a932f4ee30fe06d891909b328e9ed103a77010e273b19cd7e9debf3ba43204c6", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:005f82a614bbe1a6cca6611aee9b59d97e2f8b51c2b88b41d6f800145410a166", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:0ac394773bec589974cc6ecc5f99dc5faf2871e771295cb84f33a86bdbd51a6d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:4cabb0c5c75a395579d7523d3e7616db9fb0e1f40d3b9f581f6e94eebb049810", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:1ce83ef85ac222374d15e090a30072e96219bea1a6c1e72bb122842ebbec7cee", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:7972bb152e68b8fdb1a77c84868b66e420365c9f526254fb272a7263500acbff", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:7cd981f1dfd524edc5e5992f10cd1b3e723adcb3cfaf8c7bb42c8f5197e64378", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:a25e72eac82204e6200ec843bc06d7b59a1ce7c755666bf1ed9effd12ee466d7", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:292ac0c56b89fb197298bc0cda18828e0c6c524414d7ccc38133e2552283136f", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:6ae09222a0b9aefa3ead90b1f1ee08972570b1f5e39d25c05108228e4d315d50", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:76e245eff00a30a5db15851804f5047caa072e5f418e44fd75e70fcaba611e73", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:b0ab6f58a00a32e8119b2b598d56acdc5919c7d9414eec24beb5eca9d84c90dc", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:cea5ec9a6380cf597ff5f333d46b2a16568c3e71d059a326f645284c6e6c171e", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:f6d50ea9aa365ffb71b924d04e606e7e7be9fc0ff67899ad1fd348bb209a7ad1", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:d45a4dad3bf9f007f05eca41b5c04d4ae329405b60f307f8d6959fc92dee841e", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:338d67a9fa343855efdd6d99bf0d28bdcf03631bdff7591c1a21e975471a510a", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:e2efb27c1fed30b949be0f29464b99701353b8eb65bd2da624505c7fc1e586a4", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:642738508f133e3709992b5542f048755e1ca6bdd1c3eed8fbf6de60135fe754", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:31f6f5efb0759335de46ac3ab4c8a64955f838afc9743a20f2e81a8cb54cb36a", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:95d0d03fbc1d4147f02a0b3da7cd76efbdd75d1f5812cf6160e922336abbf270", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:439afb2b8b905d32073d55ee61bb5ebc085ac1dd2c4bbb6b25f90051827dc6d2", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:d90d617944f40798165d77873629210aed1662ccc21b8687d92b63eaac5ace8c", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:4c8dc74bd9e7cebd9f78bae013483cdc6350fb8eb90debbf2888a89a0c30232c", "enabled": 1 } }, "rhgb": { "100": { "checksum": 
"sha256:4b222d1ae2a6931560eca08e087c56c7835ce79c3f82514c5c6d3e98a7de89b1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:12ea9c8c79218c6ef2da68bbe6f321ca6a7247a0f40142f1be8b85cb5b84d54d", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:1f2f53ec9801fa0fff759f87f132d50b84cd6eb10350ac7fb5f0ea3c5fa09121", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:39f15722a115b11064c20b8bc2758e8fe06531a8f923ea00a3e7079a0554e3d6", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c9afadeea7fc4762d923383de22df24de13563a6dbe205b9ab73a6ad0a5cff79", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:5f31c86e2f2cc425040785cc22a9040c7cdb80bd1145dfd668d2b2597534d6e3", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:6154fe3dc060aa2df1f629434a417621e043836c3fc0d1cfe128db80a0c3a5d9", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:2001491066517e9d1718973ef9ce30640101b146abaac53218e9fafc5838eba5", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:932eb717c50cc3b89b0d39754e8d42285224c7112dc8fb773b979ae335c61b81", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:08975182ac9221887a5cec3f1e51639802794cfa718d6bfe245aa5f132bac74d", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:931c928c6bf89625bce5b6cb76084615486f91d6e86a09fcaf007c47c5e68ba7", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:0620e84d3a712ecc8e401e1ce695e4a5c1dd33c2097d6cec8127273e2063e36a", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:3368e81d1e4c4368ed76661f4b636f38651dcc804843c4b1ef0f8ea0eff45d0d", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:b1d6f8003dfb181c492f970957c0fdbac2625661fbec8ac0925a6c9b141429c0", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:340e748f2da66a79889ffe2d77224bce0090a2954743d5c1098ac3cb1eb48866", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:58d4cffb8b490c67aa4b1a335b0a3b933af4dfe6cc022fa0bc7400841d1efba7", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:34695491986303ceda8be41882d8279b3065627df85b9ff616650c420c6395b8", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:26f6c19589d58fd23c303ac699697517d6883a9531837ad406e2f09b7507278d", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:0aa06de248b996ddd0afc67811e82a96bde2dc7a2c328ecbbf6c6a5c9c780784", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:3cf7dfe541071b3898cd76674ccce511cb2b25626b0e5f9e43fb903f928508c5", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:d8e666993d2c3c43a5efc6628d04fed230f380cec2feafc9fb1eb305239ad954", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:c66a34fe691a84d52c8dd62ed1e9c8525796ccb3114743202a1d11dab4397c1b", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:76d4803f3945be392c2ba2f3c801fc3a8d629ea342e2352aa2100f3cb70c6a65", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:4c91ce35da807c4194c847e713628c8c5a8c2edabbc32800a9f0245175d61f9a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:dd4e4f83f573e60a00c4c8f9fc6d281307ef29ab033281ec1d3bfe763ac842ef", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:12903fc32000cd4e9785975352f7cfc11ebb15acfbe277be575edb318b15131f", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:5a49a242fb57fbde3ec1d0b7c33432f0beb2992005df650c1387cd727cfc31a8", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:6b08742937f98065198547ba01efe084ce27119742dbfb22ffaefb651adcd4c5", "enabled": 1 } }, "sblim": { "100": { "checksum": 
"sha256:cf43ae0bb1072dfbd3f191147458ab654f4bf36c7e6e52b3ddc5e21d565e527b", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:5c749a6c028d95eabf66a4af046f252d580a96be344cc6a4cbaf6c41bc3d7052", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:fbd918572c57ed1724375e81afce5d108e52933bcd98b454196a51fa9af36c7f", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:35e4d2a3208b8c0d74c4016309c3447efac46618ce4209c78af9861f95cf36c6", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:3d5c69fa4c3932728225e61fa1eeb458ff2a29a2876c739d1e203322dc4dc7b3", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:b74fad36b6b99dce25b9723b5c83278e4cf0ad7a66b7f2d1788fa930737b6b26", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:cf1265945526b166650edb71077e9778d22a9a35e59b12e1edae8b233e8e656b", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:95193c003b42fd26e548de51bb8652289fc2fc66bd38f571c6e7173befc6e33d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:60686aef35513ad652ac553180e26c9864b890a3a5f442a091375b00f886b443", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:a8c3633cf363e103e840a32cace7793040117dbc627ae5ebdbb4509f79273cef", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:a78576b227ed6d26f57c3ee3bceb45529e50e6662914cf22ad89cc4dedce2251", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:f7c098e79dd886767fe6d5ae4e564d458a4b1243e99c0c74d8c36432e138599a", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:cda143abf21914c24f0e38d8e9b9cb2668c88b44cb4df711004658dd53d3e902", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:ba397cdd0a7a730fc70c7cad3cbe4ed2475c670b65ee5f9914aeb08b58200845", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:8ef416256baedc5250ebe2cee413dec8082c9dcb2e9d11a4efa164326551eae8", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:81de53921f1a522847961acb56f77dbad54284bcf8841f52afe11ced904a20d7", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:95c7ee276c5baae8d1a63c94a349ea997f421cebdf73d013ab4b90b34d986fd0", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d871da983b281e8a8daec281efefbc6f6bf1ce5d5cbea4b352b49a08b0d89c77", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:4e5c5d73145bbed79c08e82d0a21a965d8cb7a92db76b202b5d134fa42c95fe1", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:af32d5989dfb265da12ba1be45f553eb0cca6ad12097540b811f7c30fbb208fe", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:b1696f79ceb514e5ffed2caea4f90f783c94ca4a73a22f278e48438182d4f07e", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:8059bae88c09a10f717526e476d1a4007db637fea6adc08fad6b17449dbc9e4d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:3d50318423313efcf8160d08a3bd655e53f34b30374d9dc30ad5bea5cc7e0776", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:4054ceef428a1df14d5fd4c620fce5d039a236500c1368a7a6e4bb07135d307a", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:b08147b36ed3fa54428c34c8ac8e2781717ecb453c2372760d41a7738b7757da", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:d6ec108e7f32a9cfe6f024cd5287a9b020783887f6a71e03b113eab3304342ab", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:97b2011b254075f1c3ef158248c3d5d873e56873abe9fc9110438508cb252edd", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5f14c1526eaa63f5176c2e6401410159cacd7f06f89d3c37c6670f5f3193eaa4", "enabled": 1 } }, "sssd": { 
"100": { "checksum": "sha256:470d9136be0a7f505b5340603f17cdda4c421ef6fe1e646355107e1bdd7989e9", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:79e0bd24243575137ccd1ed2bb13e1a164c1590f4a72e89c7b46ed2d06fbb8f3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:9ef028437851996db7b3152ef672f991fde6cd464194eef73c1670c33cdb274f", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:0a08f155a5545909cceeb2c2221dcee1980385b52a4afd3f8b8f6704617d14a5", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:46468db34c31c668f4b213b8ed14fcfb53e1e183431f6237364acf686d83b8f0", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:b24d3a5bcd4ecd35cd51836670ce97820a48a3669c0d590e716aee30709dc1f7", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:858e8488feb83d85137a778c3b8c5140d9bc7d9b83a43077a23fbfd27091349c", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:c274be08afec52e985bfc508199ef983f1f2eab41bf9b72b0921aa0276e47a51", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2a78595b73c7ea25c5b395ec91f18b3dad58002dd8ef3652d69edd5a8c13f3f5", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:ccacc18e643d0ca081b36d910abb0ad6fae2acdd1f92a52b4fc9004fc31f4677", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:641f7e064ae6276e0214ecc099d32001faca806eefc62d5505370914a0fc496c", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:8b3d56e43b270a3c8e85c91782ceb793a817dadff0af988ed17ec41251b9b315", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:6cf6d7c76e8239bbd876a16bf8321e68680308e33cf8a88e89236172e07d852e", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:e35d3df921d581298273023c20b12e4d1168c249b06e08458a27732a4ff082ce", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:f1ad5434a7a4d19642b3c3af71866ee2ca3f4d3578dd9ed8ab410131b6372b6c", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:b09965ae1db2d4aeab2d1c8775897288e6d55224183205ed48002daa816d2bbd", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:8d31e97d05e23cbca57938b58d725c33af5e6aaa3e563971a398936cd3a21b36", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:2ea50e3363cefebf4b9bbcb6bcdd9e923da455c54a14c509f8c5ba9ca4da1c5d", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:a4d57e1e23ddfdbf6977fae56c0797b5791bd7a03bc3e7da5ae87d6f9ca870cc", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:98fac790af3d7a87e75899e112ba5d4cd2455261e44b60f1a0d7387ba0e0ad49", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:e531681ec043e98c7aa5feeb5948eb339c71c657dd9a93a7dc35909aee56de2b", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:7746597ed5d872b63e1e842421771ebacdcde5288bfd6d006346ad554e4799e7", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:e599de319c72ac9f8ca525508c1d2ce54b46a85f378b56b04ff8fa1781250c60", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:9825f5ea5ecf0720ae08c5fb7a50d3318b3dfb520801cf5ec8c0663364df5a62", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:967c4a8d81c00053b35bd61cd547e73dba57abbdd31de06c242c46fac0aff410", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:127a1f551291ce8c39e638b32770fdeadda67312a1c16dc1fad7da9b5a4f5c01", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:c3135382d1212256d9ad9ce4c893271cc8093256fdfd34e0a37d9912dc75b16d", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:5650adc4d1a2c1db0f3733e73fe7b7b9aa4fda69a1d3308e0b59a8c4cf30c5f5", "enabled": 1 } }, "tomcat": { "100": { 
"checksum": "sha256:59312967f089bd0efa786367f1587ca124a49b7eb0ba7b5f21c2d66bc66e1b1b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:210d53406ae0727c0243c597928358124d2c44c16bf33a47767a3ac88227414b", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:758f35f5b9f4195f564792cf864a87a9b6e8de155fc2d348d4eec0ca30d8463d", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:59a579d32a09dd28b983fdaaa6f695be68dfbe0273ee26bd8540e116d07084b4", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:acb78447445a0d74b28f8e7b5db170464c56476be775e8d2577b04cb55ce3f69", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:fbd1de444b4bfe75df73aee4560955dac1378eb7e5654d05d849c67db43546b5", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:c05987c6a9f49b3370c011d79431dc52d6d435e89a577d7d10a8db02587a3786", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:be7e30d2c6b24f9d403c9b96acbf9258310b570cb8bf11fdf05166b4b4144664", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e68cce1640c75ed26848cf3b931a549489011fcb9af121621dbaa578fd71cb3e", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:1b37d8de7cf505da7e184ad33a2d04904d9fc51d64707b5d7e449b9d615deaa2", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:c66e3433658ad0f732140415ba0489902843bd7a789f34b3dacee0cb51372b80", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:e698b5479929ff28896c7e38efc410a47b6a5cd38eafd56c0c8de7fa210d16e3", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:00518ce2d8f4e15cf7186a7467c8710848f2312adbbfb919cd6bed45d40d10d4", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:495bbffef9f008519da7918d89fdb853bab29a6c718ed72b13496574d16a18af", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:3fd80d33fa8d00faaa71556b252c45f509297db070ec8fd764621f29383ec539", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:ec2502e379bebb8c8b29386cb8324f0fe8b3935bf065702a1b0a3cf4224e9798", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:35178826cea105b5cbec665c4032d007e453cc6897c787d14b00700e37ac5cdc", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:24d457c012fb774bde2ecfb530699e6b4768604525b33f8d3cc99844f53918bb", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:5451b95ffd75ed6e3a933bc0518e3dd2e9dfad315b4a6d3b08653469e8168668", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:7e39bde0db4bb71a014b04fe58282e134c94c1e753bd1b15e3e7f64b016bcb51", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:b6a3a26dac9580a586502165719438a72e9d41f72ff3d21eb05225a6d368f296", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:f513668cef5d03425903904f53f97a3640fa138faa4d1c13192d46b752b78c71", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:bc8ddc6407a2ff415e265da8ff8609013d8d173979235acf4fcd1cbfc313a571", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:b4700a4212a1204155f5cfb354ab5728d03bc5676a834bd0383481b2226d3950", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:d80a998844ea68164a2ac00d46b8c5ce51107066dc44a30ab41057a4e3ac259e", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:ec8354aba09fd7ab45ed59a737034196f8b6c4df25e6079c4a00985e36b15f92", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:2b632a2cf0d1be1e6bffaf66a320cfd73cd3cb175711cfcab9ff77147838db40", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:70cd067efc6e8ae5416b382c4b056f7a723459bc1c4e2e7f24fdb757d341845d", "enabled": 1 } }, 
"vnstatd": { "100": { "checksum": "sha256:fc8312deb1ef563cfbf8da562e6afba6dd16f4adc97e027c294ff06b1f1ef29f", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:e1a9309d48b6463d32450421e867adadb08adbd641eaa072dcb5cd3d629f5dae", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:6b19f10f4a51f6f1f343b082d4a96c3335b191f00d41152ae090627f727b8360", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:1171d89af3fa7fc84808f2b6f44fa9a6f8c161198fd3e326fa0f6f63b3820aac", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:9d1a40c730927da34e987d2aae6018f328fd6e9bf2d6134644515f50a359eb8c", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:f836d4af3abab47d1c5afe5f6a83ca9303506403e75e65f10fe4c4c719dee46a", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:43f69260eb8c399ceb227825b190ee1758a60b1b78052b1fbfe21d5fce8daab5", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:204369c49480adaf6c3bbee72b72ec17a5fac47821e371ac9b0e633666af050e", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ec1197ebeb6bdbc525a66b7e661a8f7a58fe0a4cbf09ba7df7ee055d23746c1c", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:04f7d964a66c88c0ed6af6d352fa81af6618f3a16c0db0363e6f2df07d9ea83a", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:e79e6baf86bd76cda73219fd891bd706c38ecb6e37a92c24ba4291b28e8782de", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:710ba99d8d809150194ab9c84f7e292435f65de06702b0fe6e9bdfa362076621", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:05105fee82ae1decbb668b154e32f643eda2779459b0e35edcfa358996436147", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:a7eab2820c4bf5c9e51dd30942fce48426c0e35c25d5c13efda6e9313602a66f", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:b1738ace3c35a58867613fabb433a761136afae86bab322ea4d192436c5b0ddd", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:913b9c2802fc6607d811ffc278dff3fb84d81942603cf21e1b4efcebf6a7529e", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:19a33723d291446ee9617d0120088d7bae884e5a963c48a8afec20fc6bacc4bc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:0e85101587e037fa8552703dfe40e3c31d4b86d65981d03ca3bcb9f91cde9e06", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Saturday 02 November 2024 08:49:15 -0400 (0:00:03.531) 0:00:21.492 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Saturday 02 November 2024 08:49:15 -0400 (0:00:00.044) 0:00:21.537 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Saturday 02 November 2024 08:49:15 -0400 (0:00:00.022) 0:00:21.559 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port 
systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 02 November 2024 08:49:15 -0400 (0:00:00.032) 0:00:21.592 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1730551739.7111914, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1730551735.5871959, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 532676801, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1730551735.5871959, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "2485065829", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 02 November 2024 08:49:15 -0400 (0:00:00.354) 0:00:21.947 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int == 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 02 November 2024 08:49:16 -0400 (0:00:00.044) 0:00:21.991 ***** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0775", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 28, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 02 November 2024 08:49:16 -0400 (0:00:00.381) 0:00:22.372 ***** changed: [managed-node3] => { "changed": true, "checksum": "cab519df8c21e60fd06ac780e2c7bd41ad441042", "dest": "/etc/systemd/system/tangd.socket.d/override.conf", "gid": 0, "group": "root", "md5sum": "fc727969e0bd264a9cc7f9c6bc56714c", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:tangd_unit_file_t:s0", "size": 90, "src": "/root/.ansible/tmp/ansible-tmp-1730551756.4544008-10995-180496254537908/.source.conf", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 02 November 2024 08:49:17 -0400 (0:00:00.675) 0:00:23.048 ***** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": true }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 02 November 2024 08:49:17 -0400 (0:00:00.031) 0:00:23.080 ***** included: 
fedora.linux_system_roles.firewall for managed-node3 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 02 November 2024 08:49:17 -0400 (0:00:00.106) 0:00:23.186 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 02 November 2024 08:49:17 -0400 (0:00:00.047) 0:00:23.234 ***** ok: [managed-node3] TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 02 November 2024 08:49:17 -0400 (0:00:00.502) 0:00:23.736 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 02 November 2024 08:49:18 -0400 (0:00:00.380) 0:00:24.117 ***** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 02 November 2024 08:49:18 -0400 (0:00:00.038) 0:00:24.155 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 02 November 2024 08:49:18 -0400 (0:00:00.353) 0:00:24.508 ***** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 02 November 2024 08:49:18 -0400 (0:00:00.038) 0:00:24.546 ***** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 02 November 2024 08:49:19 -0400 (0:00:01.074) 0:00:25.620 ***** skipping: [managed-node3] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 02 November 2024 08:49:19 -0400 (0:00:00.041) 0:00:25.662 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: 
/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 02 November 2024 08:49:19 -0400 (0:00:00.042) 0:00:25.705 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 02 November 2024 08:49:19 -0400 (0:00:00.050) 0:00:25.755 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 02 November 2024 08:49:19 -0400 (0:00:00.059) 0:00:25.815 ***** skipping: [managed-node3] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 02 November 2024 08:49:19 -0400 (0:00:00.077) 0:00:25.892 ***** ok: [managed-node3] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus-broker.service sysinit.target polkit.service basic.target system.slice dbus.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend 
cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service shutdown.target ebtables.service iptables.service ipset.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698376704", "EffectiveMemoryMax": "3698376704", "EffectiveTasksMax": "22336", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13960", "LimitNPROCSoft": 
"13960", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13960", "LimitSIGPENDINGSoft": "13960", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3290501120", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", 
"StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22336", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 02 November 2024 08:49:20 -0400 (0:00:00.554) 0:00:26.447 ***** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target system.slice polkit.service dbus-broker.service sysinit.target dbus.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target ip6tables.service ebtables.service ipset.service iptables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", 
"Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698376704", "EffectiveMemoryMax": "3698376704", "EffectiveTasksMax": "22336", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13960", "LimitNPROCSoft": "13960", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13960", "LimitSIGPENDINGSoft": "13960", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3291049984", 
"MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22336", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": 
"infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 02 November 2024 08:49:21 -0400 (0:00:01.129) 0:00:27.576 ***** ok: [managed-node3] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 02 November 2024 08:49:21 -0400 (0:00:00.085) 0:00:27.662 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 02 November 2024 08:49:21 -0400 (0:00:00.059) 0:00:27.721 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 02 November 2024 08:49:21 -0400 (0:00:00.064) 0:00:27.785 ***** changed: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "immediate": true, "permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.696) 0:00:28.482 ***** skipping: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "immediate": true, "permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.063) 0:00:28.545 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall 
config if no arguments] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.052) 0:00:28.597 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.042) 0:00:28.640 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.050) 0:00:28.691 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.062) 0:00:28.754 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.046) 0:00:28.801 ***** skipping: [managed-node3] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 02 November 2024 08:49:22 -0400 (0:00:00.064) 0:00:28.865 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 02 November 2024 08:49:23 -0400 (0:00:00.729) 0:00:29.594 ***** changed: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-11-02 08:48:20 EDT", "ActiveEnterTimestampMonotonic": "300282438", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-11-02 08:48:20 EDT", "AssertTimestampMonotonic": "300250725", "Backlog": "2147483647", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", 
"CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "9965000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-11-02 08:48:20 EDT", "ConditionTimestampMonotonic": "300250722", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/tangd.socket", "ControlGroupId": "4482", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DropInPaths": "/etc/systemd/system/tangd.socket.d/override.conf /etc/systemd/system/tangd.socket.d/override2.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3698376704", "EffectiveMemoryMax": "3698376704", "EffectiveTasksMax": "22336", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-11-02 08:48:20 EDT", "InactiveExitTimestampMonotonic": "300252693", "InvocationID": "9c19475b3a3f4d7bbe0045d3ec667fd4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", 
"LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13960", "LimitNPROCSoft": "13960", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13960", "LimitSIGPENDINGSoft": "13960", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:7500 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "128", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "3262689280", "MemoryCurrent": "4096", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "1134592", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassFileDescriptorsToExec": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "PollLimitBurst": "150", "PollLimitIntervalUSec": "2s", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "sysinit.target system.slice", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", 
"SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-11-02 08:48:20 EDT", "StateChangeTimestampMonotonic": "300282438", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "listening", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22336", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir] ************************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:58 Saturday 02 November 2024 08:49:24 -0400 (0:00:00.601) 0:00:30.196 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1730551760.934162, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1730551757.0141687, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 532676801, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1730551757.0141687, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 49, "uid": 0, "version": "2485065829", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [Check custom file] ******************************************************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:64 Saturday 02 November 2024 08:49:24 -0400 (0:00:00.366) 0:00:30.562 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1730551741.6001894, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2", "ctime": 1730551735.5881958, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 536871107, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, 
"mimetype": "text/plain", "mode": "0664", "mtime": 1730551735.2071962, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "709332064", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify role reported no changes] ***************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:70 Saturday 02 November 2024 08:49:24 -0400 (0:00:00.371) 0:00:30.934 ***** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Check for ansible_managed, fingerprint in generated files] *************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:74 Saturday 02 November 2024 08:49:24 -0400 (0:00:00.030) 0:00:30.964 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml for managed-node3 TASK [Get file] **************************************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml:3 Saturday 02 November 2024 08:49:25 -0400 (0:00:00.037) 0:00:31.002 ***** ok: [managed-node3] => { "changed": false, "content": "IwojIEFuc2libGUgbWFuYWdlZAojCiMgc3lzdGVtX3JvbGU6bmJkZV9zZXJ2ZXIKCltTb2NrZXRdCkxpc3RlblN0cmVhbT0KTGlzdGVuU3RyZWFtPTc1MDAK", "encoding": "base64", "source": "/etc/systemd/system/tangd.socket.d/override.conf" } TASK [Check for presence of ansible managed header, fingerprint] *************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml:9 Saturday 02 November 2024 08:49:25 -0400 (0:00:00.458) 0:00:31.460 ***** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Remove custom file] ****************************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:80 Saturday 02 November 2024 08:49:25 -0400 (0:00:00.106) 0:00:31.566 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "state": "absent" } TASK [Run the role with default port] ****************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:85 Saturday 02 November 2024 08:49:25 -0400 (0:00:00.370) 0:00:31.937 ***** included: fedora.linux_system_roles.nbde_server for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.044) 0:00:31.982 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.036) 0:00:32.018 ***** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] ******* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.046) 0:00:32.064 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.027) 0:00:32.092 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.029) 0:00:32.122 ***** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ "tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [ "/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.046) 0:00:32.168 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 02 November 2024 08:49:26 -0400 (0:00:00.039) 0:00:32.207 ***** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 02 November 2024 08:49:27 -0400 (0:00:01.046) 0:00:33.254 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 02 November 2024 08:49:27 -0400 (0:00:00.040) 0:00:33.294 ***** ok: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, 
"changed": false, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 02 November 2024 08:49:27 -0400 (0:00:00.366) 0:00:33.661 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 02 November 2024 08:49:27 -0400 (0:00:00.056) 0:00:33.717 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 02 November 2024 08:49:27 -0400 (0:00:00.048) 0:00:33.766 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 02 November 2024 08:49:27 -0400 (0:00:00.061) 0:00:33.828 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1730551760.934162, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1730551765.905153, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 532676801, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1730551765.905153, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 27, "uid": 0, "version": "2485065829", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 02 November 2024 08:49:28 -0400 (0:00:00.405) 0:00:34.233 ***** ok: [managed-node3] => { "changed": false, "examined": 1, "files": [], "matched": 0, "skipped_paths": {} } MSG: All paths examined TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 02 November 2024 08:49:28 -0400 (0:00:00.383) 0:00:34.616 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/systemd/system/tangd.socket.d", "state": "absent" } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: 
/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 02 November 2024 08:49:29 -0400 (0:00:00.384) 0:00:35.000 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int != 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 02 November 2024 08:49:29 -0400 (0:00:00.047) 0:00:35.048 ***** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": true }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 02 November 2024 08:49:29 -0400 (0:00:00.031) 0:00:35.080 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 02 November 2024 08:49:29 -0400 (0:00:00.043) 0:00:35.123 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 02 November 2024 08:49:29 -0400 (0:00:00.733) 0:00:35.857 ***** changed: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-11-02 08:49:24 EDT", "ActiveEnterTimestampMonotonic": "364343910", "ActiveExitTimestamp": "Sat 2024-11-02 08:49:24 EDT", "ActiveExitTimestampMonotonic": "364312628", "ActiveState": "active", "After": "systemd-journald.socket system.slice sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-11-02 08:49:24 EDT", "AssertTimestampMonotonic": "364312876", "Backlog": "2147483647", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "9748000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config 
cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-11-02 08:49:24 EDT", "ConditionTimestampMonotonic": "364312873", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/tangd.socket", "ControlGroupId": "4891", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698376704", "EffectiveMemoryMax": "3698376704", "EffectiveTasksMax": "22336", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2024-11-02 08:49:24 EDT", "InactiveEnterTimestampMonotonic": "364312628", "InactiveExitTimestamp": "Sat 2024-11-02 08:49:24 EDT", "InactiveExitTimestampMonotonic": "364315182", "InvocationID": "6ac2f5f575624511bf2b01f073406f74", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13960", "LimitNPROCSoft": "13960", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13960", "LimitSIGPENDINGSoft": "13960", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:80 (Stream)", "LoadState": "loaded", 
"LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "64", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "3268591616", "MemoryCurrent": "4096", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "1155072", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassFileDescriptorsToExec": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "PollLimitBurst": "150", "PollLimitIntervalUSec": "2s", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "system.slice sysinit.target", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-11-02 08:49:24 EDT", "StateChangeTimestampMonotonic": "364343910", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", 
"SubState": "listening", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22336", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir is absent] **************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:89 Saturday 02 November 2024 08:49:30 -0400 (0:00:00.597) 0:00:36.454 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Debug] ******************************************************************* task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:96 Saturday 02 November 2024 08:49:30 -0400 (0:00:00.357) 0:00:36.811 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.028120", "end": "2024-11-02 08:49:31.263429", "rc": 0, "start": "2024-11-02 08:49:31.235309" } STDOUT: Nov 02 08:43:21 localhost kernel: scsi host1: ata_piix Nov 02 08:43:21 localhost kernel: ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc000 irq 14 lpm-pol 0 Nov 02 08:43:21 localhost kernel: ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc008 irq 15 lpm-pol 0 Nov 02 08:43:21 localhost kernel: GPT:Primary header thinks Alt. header is not at the end of the disk. Nov 02 08:43:21 localhost kernel: GPT:12582911 != 524287999 Nov 02 08:43:21 localhost kernel: GPT:Alternate GPT header not at the end of the disk. Nov 02 08:43:21 localhost kernel: GPT:12582911 != 524287999 Nov 02 08:43:21 localhost kernel: GPT: Use GNU Parted to correct GPT errors. Nov 02 08:43:21 localhost kernel: xvda: xvda1 xvda2 Nov 02 08:43:21 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has finished successfully. ░░ ░░ The job identifier is 62. Nov 02 08:43:21 localhost systemd[1]: run-credentials-systemd\x2dvconsole\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dvconsole\x2dsetup.service.mount has successfully entered the 'dead' state. Nov 02 08:43:21 localhost systemd[1]: Mounting sys-kernel-config.mount - Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 56. Nov 02 08:43:21 localhost systemd[1]: Mounted sys-kernel-config.mount - Kernel Configuration File System. 
░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 56. Nov 02 08:43:21 localhost systemd[1]: Reached target sysinit.target - System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 3. Nov 02 08:43:21 localhost systemd[1]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 2. Nov 02 08:43:22 localhost systemd[1]: Found device dev-disk-by\x2duuid-70689318\x2d97f1\x2d4727\x2d890d\x2d5d50652a95d6.device - /dev/disk/by-uuid/70689318-97f1-4727-890d-5d50652a95d6. ░░ Subject: A start job for unit dev-disk-by\x2duuid-70689318\x2d97f1\x2d4727\x2d890d\x2d5d50652a95d6.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-disk-by\x2duuid-70689318\x2d97f1\x2d4727\x2d890d\x2d5d50652a95d6.device has finished successfully. ░░ ░░ The job identifier is 35. Nov 02 08:43:22 localhost systemd[1]: Reached target initrd-root-device.target - Initrd Root Device. ░░ Subject: A start job for unit initrd-root-device.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-root-device.target has finished successfully. ░░ ░░ The job identifier is 34. Nov 02 08:43:22 localhost systemd[1]: Starting systemd-fsck-root.service - File System Check on /dev/disk/by-uuid/70689318-97f1-4727-890d-5d50652a95d6... ░░ Subject: A start job for unit systemd-fsck-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has begun execution. ░░ ░░ The job identifier is 41. Nov 02 08:43:22 localhost systemd-fsck[420]: /usr/sbin/fsck.xfs: XFS file system. Nov 02 08:43:22 localhost systemd[1]: Finished systemd-fsck-root.service - File System Check on /dev/disk/by-uuid/70689318-97f1-4727-890d-5d50652a95d6. ░░ Subject: A start job for unit systemd-fsck-root.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has finished successfully. ░░ ░░ The job identifier is 41. Nov 02 08:43:22 localhost systemd[1]: Mounting sysroot.mount - /sysroot... ░░ Subject: A start job for unit sysroot.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has begun execution. ░░ ░░ The job identifier is 40. Nov 02 08:43:22 localhost kernel: SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled Nov 02 08:43:22 localhost kernel: XFS (xvda2): Mounting V5 Filesystem 70689318-97f1-4727-890d-5d50652a95d6 Nov 02 08:43:23 localhost kernel: XFS (xvda2): Ending clean mount Nov 02 08:43:23 localhost systemd[1]: Mounted sysroot.mount - /sysroot. 
░░ Subject: A start job for unit sysroot.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has finished successfully. ░░ ░░ The job identifier is 40. Nov 02 08:43:23 localhost systemd[1]: Reached target initrd-root-fs.target - Initrd Root File System. ░░ Subject: A start job for unit initrd-root-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-root-fs.target has finished successfully. ░░ ░░ The job identifier is 39. Nov 02 08:43:23 localhost systemd[1]: Starting initrd-parse-etc.service - Mountpoints Configured in the Real Root... ░░ Subject: A start job for unit initrd-parse-etc.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has begun execution. ░░ ░░ The job identifier is 38. Nov 02 08:43:23 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-parse-etc.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Finished initrd-parse-etc.service - Mountpoints Configured in the Real Root. ░░ Subject: A start job for unit initrd-parse-etc.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has finished successfully. ░░ ░░ The job identifier is 38. Nov 02 08:43:23 localhost systemd[1]: Reached target initrd-fs.target - Initrd File Systems. ░░ Subject: A start job for unit initrd-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-fs.target has finished successfully. ░░ ░░ The job identifier is 51. Nov 02 08:43:23 localhost systemd[1]: Reached target initrd.target - Initrd Default Target. ░░ Subject: A start job for unit initrd.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd.target has finished successfully. ░░ ░░ The job identifier is 1. Nov 02 08:43:23 localhost systemd[1]: dracut-mount.service - dracut mount hook was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit dracut-mount.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-mount.service has finished successfully. ░░ ░░ The job identifier is 45. Nov 02 08:43:23 localhost systemd[1]: Starting dracut-pre-pivot.service - dracut pre-pivot and cleanup hook... ░░ Subject: A start job for unit dracut-pre-pivot.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has begun execution. ░░ ░░ The job identifier is 47. Nov 02 08:43:23 localhost systemd[1]: Finished dracut-pre-pivot.service - dracut pre-pivot and cleanup hook. ░░ Subject: A start job for unit dracut-pre-pivot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has finished successfully. ░░ ░░ The job identifier is 47. Nov 02 08:43:23 localhost systemd[1]: Starting initrd-cleanup.service - Cleaning Up and Shutting Down Daemons... 
░░ Subject: A start job for unit initrd-cleanup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has begun execution. ░░ ░░ The job identifier is 65. Nov 02 08:43:23 localhost systemd[1]: Stopped target timers.target - Timer Units. ░░ Subject: A stop job for unit timers.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit timers.target has finished. ░░ ░░ The job identifier is 105 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-pivot.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped dracut-pre-pivot.service - dracut pre-pivot and cleanup hook. ░░ Subject: A stop job for unit dracut-pre-pivot.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-pivot.service has finished. ░░ ░░ The job identifier is 118 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target initrd.target - Initrd Default Target. ░░ Subject: A stop job for unit initrd.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd.target has finished. ░░ ░░ The job identifier is 104 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit basic.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit basic.target has finished. ░░ ░░ The job identifier is 111 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target initrd-root-device.target - Initrd Root Device. ░░ Subject: A stop job for unit initrd-root-device.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-root-device.target has finished. ░░ ░░ The job identifier is 117 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target initrd-usr-fs.target - Initrd /usr File System. ░░ Subject: A stop job for unit initrd-usr-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-usr-fs.target has finished. ░░ ░░ The job identifier is 120 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target paths.target - Path Units. ░░ Subject: A stop job for unit paths.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit paths.target has finished. ░░ ░░ The job identifier is 100 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-ask-password-console.path: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-ask-password-console.path has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch. 
░░ Subject: A stop job for unit systemd-ask-password-console.path has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-ask-password-console.path has finished. ░░ ░░ The job identifier is 119 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target remote-fs.target - Remote File Systems. ░░ Subject: A stop job for unit remote-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs.target has finished. ░░ ░░ The job identifier is 114 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target remote-fs-pre.target - Preparation for Remote File Systems. ░░ Subject: A stop job for unit remote-fs-pre.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs-pre.target has finished. ░░ ░░ The job identifier is 115 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target slices.target - Slice Units. ░░ Subject: A stop job for unit slices.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit slices.target has finished. ░░ ░░ The job identifier is 122 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target sockets.target - Socket Units. ░░ Subject: A stop job for unit sockets.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sockets.target has finished. ░░ ░░ The job identifier is 103 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target sysinit.target - System Initialization. ░░ Subject: A stop job for unit sysinit.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sysinit.target has finished. ░░ ░░ The job identifier is 109 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target swap.target - Swaps. ░░ Subject: A stop job for unit swap.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit swap.target has finished. ░░ ░░ The job identifier is 101 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-mount.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped dracut-pre-mount.service - dracut pre-mount hook. ░░ Subject: A stop job for unit dracut-pre-mount.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-mount.service has finished. ░░ ░░ The job identifier is 102 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-sysctl.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysctl.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-sysctl.service - Apply Kernel Variables. ░░ Subject: A stop job for unit systemd-sysctl.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysctl.service has finished. ░░ ░░ The job identifier is 107 and the job result is done. 
Nov 02 08:43:23 localhost systemd[1]: systemd-tmpfiles-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-tmpfiles-setup.service - Create System Files and Directories. ░░ Subject: A stop job for unit systemd-tmpfiles-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup.service has finished. ░░ ░░ The job identifier is 97 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target local-fs.target - Local File Systems. ░░ Subject: A stop job for unit local-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit local-fs.target has finished. ░░ ░░ The job identifier is 116 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopped target local-fs-pre.target - Preparation for Local File Systems. ░░ Subject: A stop job for unit local-fs-pre.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit local-fs-pre.target has finished. ░░ ░░ The job identifier is 108 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-udev-trigger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udev-trigger.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-udev-trigger.service - Coldplug All udev Devices. ░░ Subject: A stop job for unit systemd-udev-trigger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udev-trigger.service has finished. ░░ ░░ The job identifier is 82 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Stopping systemd-udevd.service - Rule-based Manager for Device Events and Files... ░░ Subject: A stop job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 85. Nov 02 08:43:23 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 106 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-udevd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-udevd.service - Rule-based Manager for Device Events and Files. 
░░ Subject: A stop job for unit systemd-udevd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has finished. ░░ ░░ The job identifier is 85 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-cleanup.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Finished initrd-cleanup.service - Cleaning Up and Shutting Down Daemons. ░░ Subject: A start job for unit initrd-cleanup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has finished successfully. ░░ ░░ The job identifier is 65. Nov 02 08:43:23 localhost systemd[1]: systemd-udevd-control.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-control.socket has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Closed systemd-udevd-control.socket - udev Control Socket. ░░ Subject: A stop job for unit systemd-udevd-control.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-control.socket has finished. ░░ ░░ The job identifier is 83 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-udevd-kernel.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-kernel.socket has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Closed systemd-udevd-kernel.socket - udev Kernel Socket. ░░ Subject: A stop job for unit systemd-udevd-kernel.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-kernel.socket has finished. ░░ ░░ The job identifier is 86 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-udev.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped dracut-pre-udev.service - dracut pre-udev hook. ░░ Subject: A stop job for unit dracut-pre-udev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-udev.service has finished. ░░ ░░ The job identifier is 113 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-cmdline.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped dracut-cmdline.service - dracut cmdline hook. ░░ Subject: A stop job for unit dracut-cmdline.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-cmdline.service has finished. ░░ ░░ The job identifier is 112 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: Starting initrd-udevadm-cleanup-db.service - Cleanup udev Database... 
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has begun execution. ░░ ░░ The job identifier is 81. Nov 02 08:43:23 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev. ░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup-dev.service has finished. ░░ ░░ The job identifier is 98 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysusers.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-sysusers.service - Create System Users. ░░ Subject: A stop job for unit systemd-sysusers.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysusers.service has finished. ░░ ░░ The job identifier is 99 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: systemd-tmpfiles-setup-dev-early.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup-dev-early.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully. ░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup-dev-early.service has finished. ░░ ░░ The job identifier is 95 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit kmod-static-nodes.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Stopped kmod-static-nodes.service - Create List of Static Device Nodes. ░░ Subject: A stop job for unit kmod-static-nodes.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit kmod-static-nodes.service has finished. ░░ ░░ The job identifier is 121 and the job result is done. Nov 02 08:43:23 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state. Nov 02 08:43:23 localhost systemd[1]: Finished initrd-udevadm-cleanup-db.service - Cleanup udev Database. 
░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully. ░░ ░░ The job identifier is 81. Nov 02 08:43:23 localhost systemd[1]: Reached target initrd-switch-root.target - Switch Root. ░░ Subject: A start job for unit initrd-switch-root.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.target has finished successfully. ░░ ░░ The job identifier is 68. Nov 02 08:43:23 localhost systemd[1]: Starting initrd-switch-root.service - Switch Root... ░░ Subject: A start job for unit initrd-switch-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.service has begun execution. ░░ ░░ The job identifier is 93. Nov 02 08:43:23 localhost systemd[1]: Switching root. Nov 02 08:43:23 localhost systemd-journald[259]: Journal stopped ░░ Subject: The journal has been stopped ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has shut down and closed all currently ░░ active journal files. Nov 02 08:43:30 localhost systemd-journald[259]: Received SIGTERM from PID 1 (systemd). Nov 02 08:43:30 localhost kernel: audit: type=1404 audit(1730551405.397:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability network_peer_controls=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability open_perms=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability extended_socket_class=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability always_check_network=0 Nov 02 08:43:30 localhost kernel: SELinux: policy capability cgroup_seclabel=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Nov 02 08:43:30 localhost kernel: SELinux: policy capability ioctl_skip_cloexec=0 Nov 02 08:43:30 localhost kernel: SELinux: policy capability userspace_initial_context=0 Nov 02 08:43:30 localhost kernel: audit: type=1403 audit(1730551405.958:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 Nov 02 08:43:30 localhost systemd[1]: Successfully loaded SELinux policy in 594.340ms. Nov 02 08:43:30 localhost systemd[1]: Relabeled /dev/, /dev/shm/, /run/ in 16.569ms. Nov 02 08:43:30 localhost systemd[1]: systemd 256-15.el10 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP -GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBCRYPTSETUP_PLUGINS +LIBFDISK +PCRE2 +PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT +LIBARCHIVE) Nov 02 08:43:30 localhost systemd[1]: Detected virtualization xen. Nov 02 08:43:30 localhost systemd[1]: Detected architecture x86-64. Nov 02 08:43:30 localhost systemd[1]: Initializing machine ID from VM UUID. Nov 02 08:43:30 localhost systemd[1]: Installed transient /etc/machine-id file. Nov 02 08:43:30 localhost systemd[1]: bpf-restrict-fs: LSM BPF program attached Nov 02 08:43:30 localhost systemd[1]: run-credentials-systemd\x2djournald.service.mount: Deactivated successfully. 
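The entries above record systemd detecting the hypervisor ("Detected virtualization xen") and initializing a transient machine ID from the VM UUID. If a play needed to branch on that same detection, a minimal sketch using Ansible's own fact gathering could look like the following; the subset name and the debug task are illustrative and not part of this test run:

    - name: Gather only virtualization facts (illustrative)
      ansible.builtin.setup:
        gather_subset:
          - virtual

    - name: Report the detected hypervisor
      # On this host it would be expected to agree with systemd's "xen" above.
      ansible.builtin.debug:
        var: ansible_facts.virtualization_type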
Nov 02 08:43:30 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Stopped initrd-switch-root.service - Switch Root. Nov 02 08:43:30 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. Nov 02 08:43:30 localhost systemd[1]: Created slice system-getty.slice - Slice /system/getty. Nov 02 08:43:30 localhost systemd[1]: Created slice system-serial\x2dgetty.slice - Slice /system/serial-getty. Nov 02 08:43:30 localhost systemd[1]: Created slice system-sshd\x2dkeygen.slice - Slice /system/sshd-keygen. Nov 02 08:43:30 localhost systemd[1]: Created slice user.slice - User and Session Slice. Nov 02 08:43:30 localhost systemd[1]: Started systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch. Nov 02 08:43:30 localhost systemd[1]: Started systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch. Nov 02 08:43:30 localhost systemd[1]: Set up automount proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point. Nov 02 08:43:30 localhost systemd[1]: Expecting device dev-ttyS0.device - /dev/ttyS0... Nov 02 08:43:30 localhost systemd[1]: Reached target cryptsetup.target - Local Encrypted Volumes. Nov 02 08:43:30 localhost systemd[1]: Stopped target initrd-switch-root.target - Switch Root. Nov 02 08:43:30 localhost systemd[1]: Stopped target initrd-fs.target - Initrd File Systems. Nov 02 08:43:30 localhost systemd[1]: Stopped target initrd-root-fs.target - Initrd Root File System. Nov 02 08:43:30 localhost systemd[1]: Reached target integritysetup.target - Local Integrity Protected Volumes. Nov 02 08:43:30 localhost systemd[1]: Reached target paths.target - Path Units. Nov 02 08:43:30 localhost systemd[1]: Reached target slices.target - Slice Units. Nov 02 08:43:30 localhost systemd[1]: Reached target swap.target - Swaps. Nov 02 08:43:30 localhost systemd[1]: Reached target veritysetup.target - Local Verity Protected Volumes. Nov 02 08:43:30 localhost systemd[1]: Listening on dm-event.socket - Device-mapper event daemon FIFOs. Nov 02 08:43:30 localhost systemd[1]: Listening on lvm2-lvmpolld.socket - LVM2 poll daemon socket. Nov 02 08:43:30 localhost systemd[1]: Listening on rpcbind.socket - RPCbind Server Activation Socket. Nov 02 08:43:30 localhost systemd[1]: Reached target rpcbind.target - RPC Port Mapper. Nov 02 08:43:30 localhost systemd[1]: Listening on systemd-coredump.socket - Process Core Dump Socket. Nov 02 08:43:30 localhost systemd[1]: Listening on systemd-creds.socket - Credential Encryption/Decryption. Nov 02 08:43:30 localhost systemd[1]: Listening on systemd-initctl.socket - initctl Compatibility Named Pipe. Nov 02 08:43:30 localhost systemd[1]: systemd-pcrextend.socket - TPM PCR Measurements was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Nov 02 08:43:30 localhost systemd[1]: systemd-pcrlock.socket - Make TPM PCR Policy was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Nov 02 08:43:30 localhost systemd[1]: Listening on systemd-udevd-control.socket - udev Control Socket. Nov 02 08:43:30 localhost systemd[1]: Listening on systemd-udevd-kernel.socket - udev Kernel Socket. Nov 02 08:43:30 localhost systemd[1]: Mounting dev-hugepages.mount - Huge Pages File System... Nov 02 08:43:30 localhost systemd[1]: Mounting dev-mqueue.mount - POSIX Message Queue File System... 
Nov 02 08:43:30 localhost systemd[1]: Mounting sys-kernel-debug.mount - Kernel Debug File System... Nov 02 08:43:30 localhost systemd[1]: Mounting sys-kernel-tracing.mount - Kernel Trace File System... Nov 02 08:43:30 localhost systemd[1]: auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). Nov 02 08:43:30 localhost systemd[1]: Starting kmod-static-nodes.service - Create List of Static Device Nodes... Nov 02 08:43:30 localhost systemd[1]: Starting lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@dm_mod.service - Load Kernel Module dm_mod... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@efi_pstore.service - Load Kernel Module efi_pstore... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@fuse.service - Load Kernel Module fuse... Nov 02 08:43:30 localhost systemd[1]: Starting modprobe@loop.service - Load Kernel Module loop... Nov 02 08:43:30 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Stopped systemd-fsck-root.service - File System Check on Root Device. Nov 02 08:43:30 localhost systemd[1]: systemd-hibernate-clear.service - Clear Stale Hibernate Storage Info was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/HibernateLocation-8cf2644b-4b0b-428f-9387-6d876050dc67). Nov 02 08:43:30 localhost systemd[1]: Starting systemd-journald.service - Journal Service... Nov 02 08:43:30 localhost systemd[1]: systemd-modules-load.service - Load Kernel Modules was skipped because no trigger condition checks were met. Nov 02 08:43:30 localhost systemd[1]: Starting systemd-network-generator.service - Generate network units from Kernel command line... Nov 02 08:43:30 localhost systemd[1]: systemd-pcrmachine.service - TPM PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Nov 02 08:43:30 localhost systemd[1]: Starting systemd-remount-fs.service - Remount Root and Kernel File Systems... Nov 02 08:43:30 localhost systemd[1]: Starting systemd-sysctl.service - Apply Kernel Variables... Nov 02 08:43:30 localhost systemd[1]: systemd-tpm2-setup-early.service - Early TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Nov 02 08:43:30 localhost systemd[1]: Starting systemd-udev-load-credentials.service - Load udev Rules from Credentials... Nov 02 08:43:30 localhost systemd[1]: Starting systemd-udev-trigger.service - Coldplug All udev Devices... Nov 02 08:43:30 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs. Nov 02 08:43:30 localhost systemd[1]: Finished kmod-static-nodes.service - Create List of Static Device Nodes. Nov 02 08:43:30 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. Nov 02 08:43:30 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully. 
Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@efi_pstore.service - Load Kernel Module efi_pstore. Nov 02 08:43:30 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully... Nov 02 08:43:30 localhost systemd[1]: Mounted dev-hugepages.mount - Huge Pages File System. Nov 02 08:43:30 localhost systemd[1]: Mounted dev-mqueue.mount - POSIX Message Queue File System. Nov 02 08:43:30 localhost systemd[1]: Mounted sys-kernel-debug.mount - Kernel Debug File System. Nov 02 08:43:30 localhost systemd[1]: Mounted sys-kernel-tracing.mount - Kernel Trace File System. Nov 02 08:43:30 localhost kernel: loop: module loaded Nov 02 08:43:30 localhost systemd[1]: modprobe@loop.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@loop.service - Load Kernel Module loop. Nov 02 08:43:30 localhost kernel: fuse: init (API version 7.41) Nov 02 08:43:30 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-network-generator.service - Generate network units from Kernel command line. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-remount-fs.service - Remount Root and Kernel File Systems. Nov 02 08:43:30 localhost systemd-journald[520]: Collecting audit messages is disabled. Nov 02 08:43:30 localhost systemd[1]: systemd-hwdb-update.service - Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). Nov 02 08:43:30 localhost systemd[1]: systemd-pstore.service - Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore). Nov 02 08:43:30 localhost systemd[1]: Starting systemd-random-seed.service - Load/Save OS Random Seed... Nov 02 08:43:30 localhost systemd[1]: systemd-tpm2-setup.service - TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Nov 02 08:43:30 localhost kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. Nov 02 08:43:30 localhost systemd-journald[520]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. Nov 02 08:43:30 localhost systemd-journald[520]: Runtime Journal (/run/log/journal/ec20af8a84a0db36ec472cffd50a475b) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec20af8a84a0db36ec472cffd50a475b) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.3M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Nov 02 08:43:30 localhost systemd[1]: Queued start job for default target multi-user.target. 
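The journald catalog text above reports the runtime journal limits in force (8M used, 70.5M maximum) and names the settings that control them: SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, RuntimeMaxUse=, RuntimeKeepFree= and RuntimeMaxFileSize= in /etc/systemd/journald.conf. As a minimal sketch of pinning those limits from a playbook, assuming a drop-in under /etc/systemd/journald.conf.d/ with placeholder file name and sizes (none of which come from this run):

    - name: Cap journal disk usage (placeholder values)
      ansible.builtin.copy:
        dest: /etc/systemd/journald.conf.d/50-size.conf
        content: |
          [Journal]
          RuntimeMaxUse=64M
          SystemMaxUse=200M
        mode: "0644"

    - name: Apply the new journald limits
      ansible.builtin.systemd:
        name: systemd-journald
        state: restarted

The drop-in directory and the option names are documented in journald.conf(5); the sizes here are placeholders rather than values taken from this boot.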
Nov 02 08:43:30 localhost systemd[1]: systemd-journald.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. Nov 02 08:43:30 localhost kernel: device-mapper: uevent: version 1.0.3 Nov 02 08:43:30 localhost systemd[1]: Started systemd-journald.service - Journal Service. Nov 02 08:43:30 localhost kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@lists.linux.dev Nov 02 08:43:30 localhost systemd[1]: Finished systemd-udev-load-credentials.service - Load udev Rules from Credentials. ░░ Subject: A start job for unit systemd-udev-load-credentials.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-load-credentials.service has finished successfully. ░░ ░░ The job identifier is 143. Nov 02 08:43:30 localhost systemd[1]: modprobe@dm_mod.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@dm_mod.service has successfully entered the 'dead' state. Nov 02 08:43:30 localhost systemd[1]: Finished modprobe@dm_mod.service - Load Kernel Module dm_mod. ░░ Subject: A start job for unit modprobe@dm_mod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@dm_mod.service has finished successfully. ░░ ░░ The job identifier is 166. Nov 02 08:43:30 localhost systemd[1]: Starting systemd-journal-flush.service - Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 193. Nov 02 08:43:30 localhost systemd[1]: systemd-repart.service - Repartition Root Disk was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-repart.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-repart.service has finished successfully. ░░ ░░ The job identifier is 165. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-sysctl.service - Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 192. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-random-seed.service - Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 155. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-udev-trigger.service - Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 137. 
Nov 02 08:43:30 localhost systemd-journald[520]: Runtime Journal (/run/log/journal/ec20af8a84a0db36ec472cffd50a475b) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec20af8a84a0db36ec472cffd50a475b) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.3M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Nov 02 08:43:30 localhost systemd-journald[520]: Received client request to flush runtime journal. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-journal-flush.service - Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 193. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully. ░░ ░░ The job identifier is 146. Nov 02 08:43:30 localhost systemd[1]: systemd-sysusers.service - Create System Users was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysusers.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysusers.service has finished successfully. ░░ ░░ The job identifier is 200. Nov 02 08:43:30 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 179. Nov 02 08:43:30 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 179. Nov 02 08:43:30 localhost systemd[1]: Starting systemd-udevd.service - Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 138. Nov 02 08:43:31 localhost systemd-udevd[558]: Using default interface naming scheme 'rhel-10.0-beta'. Nov 02 08:43:31 localhost systemd[1]: Finished lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. 
using dmeventd or progress polling. ░░ Subject: A start job for unit lvm2-monitor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-monitor.service has finished successfully. ░░ ░░ The job identifier is 180. Nov 02 08:43:31 localhost systemd[1]: Reached target local-fs-pre.target - Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 147. Nov 02 08:43:31 localhost systemd[1]: Reached target local-fs.target - Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 149. Nov 02 08:43:31 localhost systemd[1]: Listening on systemd-bootctl.socket - Boot Entries Service Socket. ░░ Subject: A start job for unit systemd-bootctl.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-bootctl.socket has finished successfully. ░░ ░░ The job identifier is 208. Nov 02 08:43:31 localhost systemd[1]: Listening on systemd-sysext.socket - System Extension Image Management. ░░ Subject: A start job for unit systemd-sysext.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.socket has finished successfully. ░░ ░░ The job identifier is 207. Nov 02 08:43:31 localhost systemd[1]: ldconfig.service - Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 126. Nov 02 08:43:31 localhost systemd[1]: selinux-autorelabel-mark.service - Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 182. Nov 02 08:43:31 localhost systemd[1]: systemd-binfmt.service - Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 199. Nov 02 08:43:31 localhost systemd[1]: systemd-boot-random-seed.service - Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 157. 
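Several units in this span are skipped rather than started because their unit-file conditions are not met (for example ConditionSecurity=!selinux on selinux-autorelabel-mark.service, or the trigger conditions on systemd-binfmt.service). A short sketch of how a play could inspect such a condition result on the managed host; the choice of unit and the registered variable name are illustrative:

    - name: Query the condition result for a skipped unit (illustrative choice of unit)
      ansible.builtin.command: systemctl show -p ConditionResult,ConditionTimestamp selinux-autorelabel-mark.service
      register: condition_info
      changed_when: false

    - name: Print what systemd decided
      ansible.builtin.debug:
        var: condition_info.stdout_lines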
Nov 02 08:43:31 localhost systemd[1]: systemd-confext.service - Merge System Configuration Images into /etc/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-confext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-confext.service has finished successfully. ░░ ░░ The job identifier is 178. Nov 02 08:43:31 localhost systemd[1]: systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.service has finished successfully. ░░ ░░ The job identifier is 151. Nov 02 08:43:31 localhost systemd[1]: Starting systemd-tmpfiles-setup.service - Create System Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 158. Nov 02 08:43:31 localhost systemd[1]: run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dnetwork\x2dgenerator.service.mount has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysctl.service.mount has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev\x2dearly.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev\x2dearly.service.mount has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dudev\x2dload\x2dcredentials.service.mount has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: Mounting sys-fs-fuse-connections.mount - FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 183. Nov 02 08:43:31 localhost systemd[1]: Mounted sys-fs-fuse-connections.mount - FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 183. Nov 02 08:43:31 localhost systemd[1]: Finished systemd-tmpfiles-setup.service - Create System Files and Directories. 
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 158. Nov 02 08:43:31 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: Mounting var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 238. Nov 02 08:43:31 localhost systemd[1]: Starting audit-rules.service - Load Audit Rules... ░░ Subject: A start job for unit audit-rules.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has begun execution. ░░ ░░ The job identifier is 242. Nov 02 08:43:31 localhost systemd[1]: Starting rpcbind.service - RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 278. Nov 02 08:43:31 localhost systemd[1]: systemd-firstboot.service - First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit systemd-firstboot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-firstboot.service has finished successfully. ░░ ░░ The job identifier is 189. Nov 02 08:43:31 localhost systemd[1]: first-boot-complete.target - First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 156. Nov 02 08:43:31 localhost systemd[1]: systemd-journal-catalog-update.service - Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 186. Nov 02 08:43:31 localhost systemd[1]: Starting systemd-machine-id-commit.service - Save Transient machine-id to Disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 136. Nov 02 08:43:31 localhost systemd[1]: systemd-update-done.service - Update is Completed was skipped because no trigger condition checks were met. 
░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 177. Nov 02 08:43:31 localhost kernel: RPC: Registered named UNIX socket transport module. Nov 02 08:43:31 localhost kernel: RPC: Registered udp transport module. Nov 02 08:43:31 localhost kernel: RPC: Registered tcp transport module. Nov 02 08:43:31 localhost kernel: RPC: Registered tcp-with-tls transport module. Nov 02 08:43:31 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Nov 02 08:43:31 localhost systemd[1]: Mounted var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 238. Nov 02 08:43:31 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 237. Nov 02 08:43:31 localhost systemd[1]: Started systemd-udevd.service - Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 138. Nov 02 08:43:31 localhost systemd[1]: Finished systemd-machine-id-commit.service - Save Transient machine-id to Disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 136. Nov 02 08:43:31 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 295. Nov 02 08:43:31 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 295. Nov 02 08:43:31 localhost systemd[1]: Condition check resulted in dev-ttyS0.device - /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 273. 
Nov 02 08:43:31 localhost 55-scsi-sg3_id.rules[608]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Nov 02 08:43:31 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Nov 02 08:43:31 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Nov 02 08:43:31 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Nov 02 08:43:31 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Nov 02 08:43:31 localhost kernel: Console: switching to colour dummy device 80x25 Nov 02 08:43:31 localhost kernel: [drm] Initialized cirrus 2.0.0 for 0000:00:02.0 on minor 0 Nov 02 08:43:31 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Nov 02 08:43:31 localhost kernel: Console: switching to colour frame buffer device 128x48 Nov 02 08:43:31 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Nov 02 08:43:31 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 301. Nov 02 08:43:31 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 301 and the job result is done. Nov 02 08:43:31 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 301. Nov 02 08:43:31 localhost (udev-worker)[599]: Network interface NamePolicy= disabled on kernel command line. 
Nov 02 08:43:31 localhost augenrules[566]: /sbin/augenrules: No change Nov 02 08:43:31 localhost augenrules[626]: No rules Nov 02 08:43:31 localhost augenrules[626]: enabled 0 Nov 02 08:43:31 localhost augenrules[626]: failure 1 Nov 02 08:43:31 localhost augenrules[626]: pid 0 Nov 02 08:43:31 localhost augenrules[626]: rate_limit 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_limit 8192 Nov 02 08:43:31 localhost augenrules[626]: lost 0 Nov 02 08:43:31 localhost augenrules[626]: backlog 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time 60000 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time_actual 0 Nov 02 08:43:31 localhost augenrules[626]: enabled 0 Nov 02 08:43:31 localhost augenrules[626]: failure 1 Nov 02 08:43:31 localhost augenrules[626]: pid 0 Nov 02 08:43:31 localhost augenrules[626]: rate_limit 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_limit 8192 Nov 02 08:43:31 localhost augenrules[626]: lost 0 Nov 02 08:43:31 localhost augenrules[626]: backlog 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time 60000 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time_actual 0 Nov 02 08:43:31 localhost augenrules[626]: enabled 0 Nov 02 08:43:31 localhost augenrules[626]: failure 1 Nov 02 08:43:31 localhost augenrules[626]: pid 0 Nov 02 08:43:31 localhost augenrules[626]: rate_limit 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_limit 8192 Nov 02 08:43:31 localhost augenrules[626]: lost 0 Nov 02 08:43:31 localhost augenrules[626]: backlog 0 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time 60000 Nov 02 08:43:31 localhost augenrules[626]: backlog_wait_time_actual 0 Nov 02 08:43:31 localhost systemd[1]: audit-rules.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit audit-rules.service has successfully entered the 'dead' state. Nov 02 08:43:31 localhost systemd[1]: Finished audit-rules.service - Load Audit Rules. ░░ Subject: A start job for unit audit-rules.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has finished successfully. ░░ ░░ The job identifier is 242. Nov 02 08:43:31 localhost systemd[1]: Starting auditd.service - Security Audit Logging Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 241. Nov 02 08:43:31 localhost auditd[635]: No plugins found, not dispatching events Nov 02 08:43:31 localhost auditd[635]: Init complete, auditd 4.0 listening for events (startup state enable) Nov 02 08:43:31 localhost systemd[1]: Started auditd.service - Security Audit Logging Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 241. Nov 02 08:43:31 localhost systemd[1]: Starting systemd-update-utmp.service - Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 266. 
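The augenrules output above shows the audit ruleset is empty on this host ("No rules", "enabled 0") before auditd comes up. If a test needed audit rules in place, a minimal sketch could drop a fragment under /etc/audit/rules.d/ and reload it the same way audit-rules.service does; the file name and the watch rule itself are illustrative and are not installed by this run:

    - name: Install an example audit watch rule (illustrative)
      ansible.builtin.copy:
        dest: /etc/audit/rules.d/90-example.rules
        content: |
          # watch systemd unit directories for writes and attribute changes
          -w /etc/systemd/system/ -p wa -k unit-files
        mode: "0640"

    - name: Regenerate and load /etc/audit/audit.rules from the rules.d fragments
      ansible.builtin.command: augenrules --load
      changed_when: true

augenrules(8) concatenates the rules.d fragments into /etc/audit/audit.rules and hands them to auditctl, which is what the audit-rules.service invocation above did with an empty set.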
Nov 02 08:43:31 localhost systemd[1]: Started rpcbind.service - RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 278. Nov 02 08:43:31 localhost systemd[1]: Finished systemd-update-utmp.service - Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 266. Nov 02 08:43:32 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has finished successfully. ░░ ░░ The job identifier is 301. Nov 02 08:43:32 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Nov 02 08:43:32 localhost systemd[1]: Reached target sysinit.target - System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 125. Nov 02 08:43:32 localhost systemd[1]: Started dnf-makecache.timer - dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 217. Nov 02 08:43:32 localhost systemd[1]: Started fstrim.timer - Discard unused filesystem blocks once a week. ░░ Subject: A start job for unit fstrim.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.timer has finished successfully. ░░ ░░ The job identifier is 225. Nov 02 08:43:32 localhost systemd[1]: Started logrotate.timer - Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 224. Nov 02 08:43:32 localhost systemd[1]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 223. Nov 02 08:43:32 localhost systemd[1]: Reached target timers.target - Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 216. Nov 02 08:43:32 localhost systemd[1]: Listening on dbus.socket - D-Bus System Message Bus Socket. 
░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 205. Nov 02 08:43:32 localhost systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 212. Nov 02 08:43:32 localhost systemd[1]: Listening on sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 203. Nov 02 08:43:32 localhost systemd[1]: Listening on systemd-hostnamed.socket - Hostname Service Socket. ░░ Subject: A start job for unit systemd-hostnamed.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.socket has finished successfully. ░░ ░░ The job identifier is 214. Nov 02 08:43:32 localhost systemd[1]: Reached target sockets.target - Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 201. Nov 02 08:43:32 localhost systemd[1]: Starting dbus-broker.service - D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 221. Nov 02 08:43:32 localhost systemd[1]: systemd-pcrphase-sysinit.service - TPM PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 197. Nov 02 08:43:32 localhost systemd[1]: Started dbus-broker.service - D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 221. Nov 02 08:43:32 localhost systemd[1]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 124. Nov 02 08:43:32 localhost dbus-broker-launch[645]: Ready Nov 02 08:43:32 localhost systemd[1]: Starting chronyd.service - NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 244. 
Nov 02 08:43:32 localhost systemd[1]: Starting cloud-init-local.service - Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 258. Nov 02 08:43:32 localhost systemd[1]: Starting dracut-shutdown.service - Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 148. Nov 02 08:43:32 localhost systemd[1]: Started irqbalance.service - irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 279. Nov 02 08:43:32 localhost systemd[1]: Started rngd.service - Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 248. Nov 02 08:43:32 localhost systemd[1]: Starting rsyslog.service - System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 264. Nov 02 08:43:32 localhost systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 257. Nov 02 08:43:32 localhost systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 255. Nov 02 08:43:32 localhost systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 253. Nov 02 08:43:32 localhost systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 256. Nov 02 08:43:32 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 252. Nov 02 08:43:32 localhost systemd[1]: sssd.service - System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 267. Nov 02 08:43:32 localhost systemd[1]: Reached target nss-user-lookup.target - User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 268. Nov 02 08:43:32 localhost systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 228. Nov 02 08:43:32 localhost systemd[1]: Finished dracut-shutdown.service - Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 148. Nov 02 08:43:32 localhost (qbalance)[650]: irqbalance.service: Referenced but unset environment variable evaluates to an empty string: IRQBALANCE_ARGS Nov 02 08:43:32 localhost systemd-logind[654]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Nov 02 08:43:32 localhost systemd-logind[654]: Watching system buttons on /dev/input/event0 (Power Button) Nov 02 08:43:32 localhost systemd-logind[654]: Watching system buttons on /dev/input/event1 (Sleep Button) Nov 02 08:43:32 localhost systemd-logind[654]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Nov 02 08:43:32 localhost systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 228. Nov 02 08:43:32 localhost systemd[1]: Started rsyslog.service - System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 264. 
Nov 02 08:43:32 localhost rsyslogd[653]: imjournal: filecreatemode is not set, using default 0644 [v8.2408.0-1.el10 try https://www.rsyslog.com/e/2186 ] Nov 02 08:43:32 localhost rsyslogd[653]: [origin software="rsyslogd" swVersion="8.2408.0-1.el10" x-pid="653" x-info="https://www.rsyslog.com"] start Nov 02 08:43:32 localhost rsyslogd[653]: imjournal: journal files changed, reloading... [v8.2408.0-1.el10 try https://www.rsyslog.com/e/0 ] Nov 02 08:43:32 localhost rngd[651]: Disabling 7: PKCS11 Entropy generator (pkcs11) Nov 02 08:43:32 localhost rngd[651]: Disabling 5: NIST Network Entropy Beacon (nist) Nov 02 08:43:32 localhost rngd[651]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Nov 02 08:43:32 localhost rngd[651]: Disabling 10: Named pipe entropy input (namedpipe) Nov 02 08:43:32 localhost rngd[651]: Initializing available sources Nov 02 08:43:32 localhost rngd[651]: [hwrng ]: Initialization Failed Nov 02 08:43:32 localhost rngd[651]: [rdrand]: Enabling RDRAND rng support Nov 02 08:43:32 localhost rngd[651]: [rdrand]: Initialized Nov 02 08:43:32 localhost rngd[651]: [jitter]: JITTER timeout set to 5 sec Nov 02 08:43:32 localhost chronyd[661]: chronyd version 4.6 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Nov 02 08:43:33 localhost rngd[651]: [jitter]: Initializing AES buffer Nov 02 08:43:33 localhost chronyd[661]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Nov 02 08:43:33 localhost chronyd[661]: Loaded seccomp filter (level 2) Nov 02 08:43:33 localhost systemd[1]: Started chronyd.service - NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 244. Nov 02 08:43:37 localhost cloud-init[668]: Cloud-init v. 24.1.4-17.el10 running 'init-local' at Sat, 02 Nov 2024 12:43:37 +0000. Up 17.35 seconds. Nov 02 08:43:37 localhost dhcpcd[670]: dhcpcd-10.0.6 starting Nov 02 08:43:37 localhost kernel: 8021q: 802.1Q VLAN Support v1.8 Nov 02 08:43:37 localhost systemd[1]: Listening on systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 311. 
Nov 02 08:43:37 localhost kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database Nov 02 08:43:37 localhost kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7' Nov 02 08:43:37 localhost kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600' Nov 02 08:43:37 localhost dhcpcd[673]: DUID 00:01:00:01:2e:b8:da:f9:12:f6:b2:26:b5:a7 Nov 02 08:43:37 localhost dhcpcd[673]: eth0: IAID b2:26:b5:a7 Nov 02 08:43:38 localhost rngd[651]: [jitter]: Unable to obtain AES key, disabling JITTER source Nov 02 08:43:38 localhost rngd[651]: [jitter]: Initialization Failed Nov 02 08:43:38 localhost rngd[651]: Process privileges have been dropped to 2:2 Nov 02 08:43:38 localhost kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2 Nov 02 08:43:38 localhost kernel: cfg80211: failed to load regulatory.db Nov 02 08:43:39 localhost dhcpcd[673]: eth0: soliciting a DHCP lease Nov 02 08:43:39 localhost dhcpcd[673]: eth0: offered 10.31.10.234 from 10.31.8.1 Nov 02 08:43:39 localhost dhcpcd[673]: eth0: leased 10.31.10.234 for 3600 seconds Nov 02 08:43:39 localhost dhcpcd[673]: eth0: adding route to 10.31.8.0/22 Nov 02 08:43:39 localhost dhcpcd[673]: eth0: adding default route via 10.31.8.1 Nov 02 08:43:39 localhost dhcpcd[673]: control command: /usr/sbin/dhcpcd --dumplease --ipv4only eth0 Nov 02 08:43:39 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 320. Nov 02 08:43:39 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 320. Nov 02 08:43:39 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-hostnamed[694]: Hostname set to (static) Nov 02 08:43:39 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 258. Nov 02 08:43:39 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target network-pre.target - Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 164. Nov 02 08:43:39 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager.service - Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 220. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.1659] NetworkManager (version 1.51.2-1.el10) is starting... 
(boot:23b7f8bb-1cd9-4d23-a2c1-7b7be2fd4b1a) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.1661] Read config: /etc/NetworkManager/NetworkManager.conf (etc: 30-cloud-init-ip6-addr-gen-mode.conf) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.1998] manager[0x55d1e28c1ab0]: monitoring kernel firmware directory '/lib/firmware'. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2025] hostname: hostname: using hostnamed Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2026] hostname: static hostname changed from (none) to "ip-10-31-10-234.us-east-1.aws.redhat.com" Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2029] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2032] manager[0x55d1e28c1ab0]: rfkill: Wi-Fi hardware radio set enabled Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2032] manager[0x55d1e28c1ab0]: rfkill: WWAN hardware radio set enabled Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2072] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2073] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2073] manager: Networking is enabled by state file Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2083] settings: Loaded settings plugin: keyfile (internal) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2120] dhcp: init: Using DHCP client 'internal' Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2124] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2141] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2155] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 397. 
Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2168] device (lo): Activation: starting connection 'lo' (5682ea77-ee29-4a6c-a018-6ee48716ef1e) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2190] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2193] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager.service - Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 220. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2216] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target network.target - Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 222. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2243] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2246] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2247] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2249] device (eth0): carrier: link connected Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2251] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2263] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 219. 
Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2293] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2299] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2301] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2303] manager: NetworkManager state is now CONNECTING Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2305] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 235. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2327] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2332] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.2344] dhcp4 (eth0): state changed new lease, address=10.31.10.234, acd pending Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 397. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.3262] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.3266] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.3271] device (lo): Activation: successful, device activated. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.3731] dhcp4 (eth0): state changed new lease, address=10.31.10.234 Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.3734] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 235. 
Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 236. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target nfs-client.target - NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 232. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 240. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes. ░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-cryptsetup.target has finished successfully. ░░ ░░ The job identifier is 243. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs.target - Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 231. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 172. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4131] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4159] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4167] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4173] manager: NetworkManager state is now CONNECTED_SITE Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4175] device (eth0): Activation: successful, device activated. 
Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4183] manager: NetworkManager state is now CONNECTED_GLOBAL Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com NetworkManager[701]: [1730551420.4188] manager: startup complete Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 219. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 250. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com chronyd[661]: Added source 10.11.160.238 Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com chronyd[661]: Added source 10.18.100.10 Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com chronyd[661]: Added source 10.2.32.37 Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com chronyd[661]: Added source 10.2.32.38 Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Cloud-init v. 24.1.4-17.el10 running 'init' at Sat, 02 Nov 2024 12:43:40 +0000. Up 20.98 seconds. Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | eth0 | True | 10.31.10.234 | 255.255.252.0 | global | 12:f6:b2:26:b5:a7 | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | eth0 | True | fe80::10f6:b2ff:fe26:b5a7/64 | . | link | 12:f6:b2:26:b5:a7 | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | lo | True | ::1/128 | . | host | . 
| Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info++++++++++++++++++++++++++++ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | 0 | 0.0.0.0 | 10.31.8.1 | 0.0.0.0 | eth0 | UG | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | 1 | 10.31.8.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | Route | Destination | Gateway | Interface | Flags | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | 0 | fe80::/64 | :: | eth0 | U | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: | 2 | multicast | :: | eth0 | U | Nov 02 08:43:40 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 02 08:43:41 ip-10-31-10-234.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[856]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Nov 02 08:43:41 ip-10-31-10-234.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[859]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Generating public/private rsa key pair. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key fingerprint is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: SHA256:lcDBGP2q6I+Ch3ZziCWp1RkUEcPleVlbAjo4IowtnRc root@ip-10-31-10-234.us-east-1.aws.redhat.com Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key's randomart image is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +---[RSA 3072]----+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .E=o*+oo . 
| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |oo .o=.o++ = | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |+.=.+ = o.+ | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | o o.. o .. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .. o S. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | o..o . | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |.++ .. . | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |+oo+.o. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |...o=.. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +----[SHA256]-----+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Generating public/private ecdsa key pair. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key fingerprint is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: SHA256:gyTtstXZy8FrkZno4WmrUxGaLRSktomEltG+Wddpzsk root@ip-10-31-10-234.us-east-1.aws.redhat.com Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key's randomart image is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +---[ECDSA 256]---+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .. .o. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .o. o. . | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |.oo +.o+... | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: |.. + B++oO + | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | . B =.S.X | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | o + o.E = | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | . .= = | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .. o | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .o. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +----[SHA256]-----+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Generating public/private ed25519 key pair. 
Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key fingerprint is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: SHA256:38VttmegTaeHR0UY7ADQ2369SuOfWJhkiUcUuBBMZQo root@ip-10-31-10-234.us-east-1.aws.redhat.com Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: The key's randomart image is: Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +--[ED25519 256]--+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | E+=++.+oo.| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | .o+.o o. | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | ..o.+ .| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | ..+.o..| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | S o ==.*| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | . .==+Oo| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | . o*+o*| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | o +=o| Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: | +oo | Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[789]: +----[SHA256]-----+ Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 250. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 260. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 218. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 259. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 269. 
Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 263. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 233. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 251. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com (sshd)[876]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 263. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sm-notify[875]: Version 2.7.1 starting Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 233. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sshd[876]: Server listening on 0.0.0.0 port 22. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sshd[876]: Server listening on :: port 22. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 251. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com restraintd[880]: Listening on http://localhost:8081 Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[920]: Cloud-init v. 24.1.4-17.el10 running 'modules:config' at Sat, 02 Nov 2024 12:43:42 +0000. Up 22.77 seconds. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sshd[876]: Received signal 15; terminating. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 500. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 500 and the job result is done. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 583 and the job result is done. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 583. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 582. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 580. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 578. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 581. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. 
░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 583. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 500. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com (sshd)[975]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sshd[975]: Server listening on 0.0.0.0 port 22. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com sshd[975]: Server listening on :: port 22. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 500. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com kdumpctl[884]: kdump: Detected change(s) in the following file(s): /etc/fstab Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 259. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 261. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 247. 
Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 0 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 0 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 48 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 48 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 49 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 49 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 50 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 50 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 51 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 51 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 52 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 52 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 53 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 53 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 54 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 54 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 55 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 55 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 56 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 56 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 57 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 57 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 58 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 58 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: Cannot change IRQ 59 affinity: Permission denied Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com irqbalance[650]: IRQ 59 affinity is now unmanaged Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 247. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler. 
░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 262. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 276. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 271. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 270. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 123. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 265. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 265. Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com crond[1047]: (CRON) STARTUP (1.7.0) Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com crond[1047]: (CRON) INFO (Syslog will be used instead of sendmail.) Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com crond[1047]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 14% if used.) Nov 02 08:43:42 ip-10-31-10-234.us-east-1.aws.redhat.com crond[1047]: (CRON) INFO (running with inotify support) Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1140]: Cloud-init v. 
24.1.4-17.el10 running 'modules:final' at Sat, 02 Nov 2024 12:43:43 +0000. Up 23.37 seconds. Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1142]: ############################################################# Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1143]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1145]: 256 SHA256:gyTtstXZy8FrkZno4WmrUxGaLRSktomEltG+Wddpzsk root@ip-10-31-10-234.us-east-1.aws.redhat.com (ECDSA) Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1147]: 256 SHA256:38VttmegTaeHR0UY7ADQ2369SuOfWJhkiUcUuBBMZQo root@ip-10-31-10-234.us-east-1.aws.redhat.com (ED25519) Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1149]: 3072 SHA256:lcDBGP2q6I+Ch3ZziCWp1RkUEcPleVlbAjo4IowtnRc root@ip-10-31-10-234.us-east-1.aws.redhat.com (RSA) Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1150]: -----END SSH HOST KEY FINGERPRINTS----- Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1151]: ############################################################# Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com cloud-init[1140]: Cloud-init v. 24.1.4-17.el10 finished at Sat, 02 Nov 2024 12:43:43 +0000. Datasource DataSourceEc2Local. Up 23.49 seconds Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 261. Nov 02 08:43:43 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 249. Nov 02 08:43:46 ip-10-31-10-234.us-east-1.aws.redhat.com chronyd[661]: Selected source 173.73.96.68 (2.centos.pool.ntp.org) Nov 02 08:43:46 ip-10-31-10-234.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Nov 02 08:43:46 ip-10-31-10-234.us-east-1.aws.redhat.com kdumpctl[884]: kdump: Rebuilding /boot/initramfs-6.11.0-26.el10.x86_64kdump.img Nov 02 08:43:47 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1387]: dracut-102-3.el10 Nov 02 08:43:47 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1390]: Executing: /usr/bin/dracut --list-modules Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1487]: dracut-102-3.el10 Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/70689318-97f1-4727-890d-5d50652a95d6 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.11.0-26.el10.x86_64kdump.img 6.11.0-26.el10.x86_64 Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! 
Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmand' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! 
Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'resume' will not be installed, because it's in the list to be omitted! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Nov 02 08:43:49 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmand' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! 
Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd *** Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Nov 02 08:43:50 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: fips *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-ask-password *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-initrd *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-journald *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-modules-load *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-pcrphase *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-sysctl *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-sysusers *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-tmpfiles *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: systemd-udevd *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: rngd *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: i18n *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: drm *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: prefixdevname *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: kernel-modules *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: kernel-modules-extra *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: pcmcia *** Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Skipping udev rule: 60-pcmcia.rules Nov 02 08:43:51 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: tpm2-tss *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: fstab-sys *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: rootfs-block *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: squash-squashfs *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: terminfo *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: udev-rules *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: dracut-systemd *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: usrmount *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com 
dracut[1490]: *** Including module: base *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: fs-lib *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: kdumpbase *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: memstrack *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: microcode_ctl-fw_dir_override *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl module: mangling fw_dir Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: intel: caveats check for kernel version "6.11.0-26.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: configuration "intel-06-4f-01" is ignored Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: shutdown *** Nov 02 08:43:52 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including module: squash-lib *** Nov 02 08:43:53 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Including modules done *** Nov 02 08:43:53 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Installing kernel module dependencies *** Nov 02 08:43:53 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Installing kernel module dependencies done *** Nov 02 08:43:53 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Resolving executable dependencies *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Resolving executable dependencies done *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Hardlinking files *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Mode: real Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Method: sha256 Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Files: 558 Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Linked: 23 files Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Compared: 0 xattrs Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Compared: 40 files Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Saved: 13.56 MiB Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Duration: 0.148790 seconds Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Hardlinking files done *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Generating early-microcode cpio image *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Constructing GenuineIntel.bin *** Nov 
02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Constructing GenuineIntel.bin *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Store current command line parameters *** Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: Stored kernel commandline: Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: No dracut internal kernel commandline stored in the initramfs Nov 02 08:43:55 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Squashing the files inside the initramfs *** Nov 02 08:44:03 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Squashing the files inside the initramfs done *** Nov 02 08:44:03 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Creating image file '/boot/initramfs-6.11.0-26.el10.x86_64kdump.img' *** Nov 02 08:44:03 ip-10-31-10-234.us-east-1.aws.redhat.com dracut[1490]: *** Creating initramfs image file '/boot/initramfs-6.11.0-26.el10.x86_64kdump.img' done *** Nov 02 08:44:03 ip-10-31-10-234.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window Nov 02 08:44:04 ip-10-31-10-234.us-east-1.aws.redhat.com kdumpctl[884]: kdump: kexec: loaded kdump kernel Nov 02 08:44:04 ip-10-31-10-234.us-east-1.aws.redhat.com kdumpctl[884]: kdump: Starting kdump: [OK] Nov 02 08:44:04 ip-10-31-10-234.us-east-1.aws.redhat.com kdumpctl[884]: kdump: Notice: No vmcore creation test performed! Nov 02 08:44:04 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 269. Nov 02 08:44:04 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.019s (kernel) + 4.503s (initrd) + 39.346s (userspace) = 44.869s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1019825 microseconds. ░░ ░░ Initrd start-up required 4503021 microseconds. ░░ ░░ Userspace start-up required 39346512 microseconds. Nov 02 08:44:10 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4300]: Accepted publickey for root from 10.30.32.230 port 49586 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4300]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4300) opened. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 670. 
Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 593. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4300. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 593. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 672. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: New session 2 of user root. ░░ Subject: A new session 2 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 2 has been created for the user root. ░░ ░░ The leading process of the session is 4305. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com (systemd)[4305]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Queued start job for default target default.target. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Reached target paths.target - Paths. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[4305]: Startup finished in 119ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 119247 microseconds. 
Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 672. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 752. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4300]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4316]: Received disconnect from 10.30.32.230 port 49586:11: disconnected by user Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4316]: Disconnected from user root 10.30.32.230 port 49586 Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4300]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4300) opened. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4300]: pam_unix(sshd:session): session closed for user root Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: Session 1 logged out. Waiting for processes to exit. Nov 02 08:47:06 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4348]: Accepted publickey for root from 10.31.9.101 port 44226 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4349]: Accepted publickey for root from 10.31.9.101 port 44228 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4348]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4348) opened. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4349]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4349) opened. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4348. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root. 
░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 833. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4349. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 914. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4348]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4349]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4355]: Received disconnect from 10.31.9.101 port 44228:11: disconnected by user Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4355]: Disconnected from user root 10.31.9.101 port 44228 Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4349]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4349) opened. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com sshd-session[4349]: pam_unix(sshd:session): session closed for user root Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: Session 4 logged out. Waiting for processes to exit. Nov 02 08:47:07 ip-10-31-10-234.us-east-1.aws.redhat.com systemd-logind[654]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Nov 02 08:47:36 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 995. Nov 02 08:47:36 ip-10-31-10-234.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 995. 
Nov 02 08:47:36 managed-node3 systemd-hostnamed[5688]: Hostname set to <managed-node3> (static) Nov 02 08:47:36 managed-node3 NetworkManager[701]: <info>  [1730551656.7419] hostname: static hostname changed from "ip-10-31-10-234.us-east-1.aws.redhat.com" to "managed-node3" Nov 02 08:47:36 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1072. Nov 02 08:47:36 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1072. Nov 02 08:47:46 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Nov 02 08:48:06 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 02 08:48:08 managed-node3 sshd-session[6339]: Accepted publickey for root from 10.31.10.154 port 56260 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 02 08:48:08 managed-node3 sshd-session[6339]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-6339) opened. Nov 02 08:48:08 managed-node3 systemd-logind[654]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6339. Nov 02 08:48:08 managed-node3 systemd[1]: Started session-5.scope - Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1150.
Nov 02 08:48:08 managed-node3 sshd-session[6339]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 02 08:48:10 managed-node3 python3.12[6474]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 02 08:48:12 managed-node3 python3.12[6616]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:12 managed-node3 python3.12[6729]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:15 managed-node3 groupadd[6751]: group added to /etc/group: name=tang, GID=994 Nov 02 08:48:15 managed-node3 groupadd[6751]: group added to /etc/gshadow: name=tang Nov 02 08:48:15 managed-node3 groupadd[6751]: new group: name=tang, GID=994 Nov 02 08:48:15 managed-node3 useradd[6756]: new user: name=tang, UID=994, GID=994, home=/var/cache/tang, shell=/usr/sbin/nologin, from=none Nov 02 08:48:16 managed-node3 systemd[1]: Started run-r36e5e0f6cdb14ac8b99106f1aebde39e.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r36e5e0f6cdb14ac8b99106f1aebde39e.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r36e5e0f6cdb14ac8b99106f1aebde39e.service has finished successfully. ░░ ░░ The job identifier is 1231. Nov 02 08:48:16 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1308. Nov 02 08:48:16 managed-node3 systemd[1]: Reload requested from client PID 6773 ('systemctl') (unit session-5.scope)... Nov 02 08:48:16 managed-node3 systemd[1]: Reloading... Nov 02 08:48:16 managed-node3 systemd[1]: Reloading finished in 206 ms. 
Nov 02 08:48:16 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units… Nov 02 08:48:17 managed-node3 python3.12[6937]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:18 managed-node3 python3.12[7063]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:18 managed-node3 python3.12[7176]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:18 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 02 08:48:18 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1308. Nov 02 08:48:18 managed-node3 systemd[1]: run-r36e5e0f6cdb14ac8b99106f1aebde39e.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r36e5e0f6cdb14ac8b99106f1aebde39e.service has successfully entered the 'dead' state. Nov 02 08:48:19 managed-node3 python3.12[7293]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:48:19 managed-node3 systemd[1]: Reload requested from client PID 7296 ('systemctl') (unit session-5.scope)... Nov 02 08:48:19 managed-node3 systemd[1]: Reloading... Nov 02 08:48:19 managed-node3 systemd[1]: Reloading finished in 183 ms. Nov 02 08:48:20 managed-node3 systemd[1]: Starting tangd.socket - Tang Server socket... ░░ Subject: A start job for unit tangd.socket has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has begun execution. ░░ ░░ The job identifier is 1385. Nov 02 08:48:20 managed-node3 systemd[1]: Listening on tangd.socket - Tang Server socket. ░░ Subject: A start job for unit tangd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has finished successfully. ░░ ░░ The job identifier is 1385. 
Nov 02 08:48:20 managed-node3 python3.12[7462]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:21 managed-node3 python3.12[7576]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:21 managed-node3 python3.12[7689]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:48:22 managed-node3 python3.12[7802]: ansible-ansible.legacy.stat Invoked with path=/var/db/tang/SdvwLSSLYgZIKos-Xe_3wxn0XnzqSK0wvZGijVvK8So.jwk follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:48:22 managed-node3 python3.12[7934]: ansible-ansible.legacy.stat Invoked with path=/var/db/tang/2vi2yxUPH1h3AkyGLdkfVcraZE8X4h5yVtmlOqICNfU.jwk follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:48:24 managed-node3 python3.12[8066]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:24 managed-node3 python3.12[8179]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:25 managed-node3 python3.12[8292]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:48:25 managed-node3 python3.12[8407]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:48:26 managed-node3 python3.12[8520]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None 
disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:27 managed-node3 python3.12[8634]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:27 managed-node3 python3.12[8747]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:48:28 managed-node3 python3.12[8860]: ansible-tempfile Invoked with state=directory suffix=nbde_server_keys prefix=ansible. path=None Nov 02 08:48:29 managed-node3 python3.12[8973]: ansible-ansible.legacy.stat Invoked with path=/tmp/ansible.cucliqnlnbde_server_keys/SdvwLSSLYgZIKos-Xe_3wxn0XnzqSK0wvZGijVvK8So.jwk follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:48:29 managed-node3 python3.12[9063]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1730551708.9766953-7849-149548432949615/.source.jwk dest=/tmp/ansible.cucliqnlnbde_server_keys/ owner=tang group=tang mode=0400 _original_basename=SdvwLSSLYgZIKos-Xe_3wxn0XnzqSK0wvZGijVvK8So.jwk follow=False checksum=832602c6b2608429b22fa1abea938040a568f02d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:30 managed-node3 python3.12[9176]: ansible-ansible.legacy.stat Invoked with path=/tmp/ansible.cucliqnlnbde_server_keys/2vi2yxUPH1h3AkyGLdkfVcraZE8X4h5yVtmlOqICNfU.jwk follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:48:30 managed-node3 python3.12[9266]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1730551709.9548132-7849-182630528469302/.source.jwk dest=/tmp/ansible.cucliqnlnbde_server_keys/ owner=tang group=tang mode=0400 _original_basename=2vi2yxUPH1h3AkyGLdkfVcraZE8X4h5yVtmlOqICNfU.jwk follow=False checksum=4c03462e604679f06f6e2eca8890800a14a35e0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:30 managed-node3 python3.12[9379]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-deployed keydir=/var/db/tang keygen=/usr/libexec/tangd-keygen keys_to_deploy_dir=/tmp/ansible.cucliqnlnbde_server_keys update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False Nov 02 08:48:31 managed-node3 python3.12[9492]: ansible-file Invoked with path=/tmp/ansible.cucliqnlnbde_server_keys/ state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:31 managed-node3 python3.12[9605]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1 Nov 02 08:48:32 managed-node3 python3.12[9718]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:33 managed-node3 python3.12[9831]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:48:33 managed-node3 python3.12[9946]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:48:34 managed-node3 python3.12[10059]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "true" = true ]; then rm -rf "/tmp/nbde_server_deploy_keyszx2av9mq" else rm -rf "/tmp/nbde_server_deploy_keyszx2av9mq"/* "/tmp/nbde_server_deploy_keyszx2av9mq"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:34 managed-node3 python3.12[10174]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:35 managed-node3 python3.12[10289]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:38 managed-node3 python3.12[10441]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 02 08:48:39 managed-node3 python3.12[10583]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:40 managed-node3 python3.12[10696]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:41 managed-node3 python3.12[10810]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False 
keys_to_deploy_dir=None Nov 02 08:48:41 managed-node3 python3.12[10936]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:42 managed-node3 python3.12[11051]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:43 managed-node3 python3.12[11166]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:43 managed-node3 python3.12[11280]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:44 managed-node3 python3.12[11406]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:44 managed-node3 python3.12[11521]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:45 managed-node3 python3.12[11636]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:46 managed-node3 python3.12[11750]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:46 managed-node3 python3.12[11876]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = 
true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:47 managed-node3 python3.12[11991]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:48 managed-node3 python3.12[12106]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:48 managed-node3 python3.12[12220]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:49 managed-node3 python3.12[12346]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:49 managed-node3 python3.12[12461]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:50 managed-node3 python3.12[12576]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:51 managed-node3 python3.12[12690]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:52 managed-node3 python3.12[12816]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:52 managed-node3 python3.12[12931]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 02 08:48:54 managed-node3 python3.12[13083]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:55 managed-node3 python3.12[13196]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:48:55 managed-node3 python3.12[13286]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/tangd.socket.d/override2.conf mode=0664 src=/root/.ansible/tmp/ansible-tmp-1730551734.7940044-10022-93758658161409/.source.conf _original_basename=.5xaue2v7 follow=False checksum=05987691cc309e84627f31fa0d1680a3b3b2c4b2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:48:56 managed-node3 python3.12[13399]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 02 08:48:56 managed-node3 python3.12[13514]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:57 managed-node3 python3.12[13627]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:48:57 managed-node3 python3.12[13686]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:48:58 managed-node3 python3.12[13800]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:48:59 managed-node3 python3.12[13926]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:48:59 managed-node3 python3.12[14041]: ansible-find 
Invoked with paths=['/etc/systemd/system/tangd.socket.d'] file_type=any hidden=True excludes=['^override.conf$'] use_regex=True patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False exact_mode=True contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:49:00 managed-node3 python3.12[14154]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:49:00 managed-node3 python3.12[14267]: ansible-ansible.legacy.setup Invoked with gather_subset=['!all'] filter=['ansible_service_mgr'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:01 managed-node3 python3.12[14326]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:49:02 managed-node3 python3.12[14441]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:02 managed-node3 python3.12[14556]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:03 managed-node3 python3.12[14671]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:03 managed-node3 python3.12[14730]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:49:04 managed-node3 python3.12[14844]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:49:04 managed-node3 python3.12[14957]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'python_version'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 02 08:49:05 managed-node3 python3.12[15073]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:05 managed-node3 python3.12[15186]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:06 managed-node3 python3.12[15299]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:06 managed-node3 python3.12[15358]: 
ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:49:07 managed-node3 python3.12[15472]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:07 managed-node3 python3.12[15531]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:49:07 managed-node3 dbus-broker-launch[645]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Nov 02 08:49:07 managed-node3 dbus-broker-launch[645]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Nov 02 08:49:07 managed-node3 systemd[1]: Started run-r1a3e4ed74f444fcc8d0c2266a43883dc.service - /usr/bin/systemctl start man-db-cache-update. 
░░ Subject: A start job for unit run-r1a3e4ed74f444fcc8d0c2266a43883dc.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r1a3e4ed74f444fcc8d0c2266a43883dc.service has finished successfully. ░░ ░░ The job identifier is 1464. Nov 02 08:49:07 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1541. Nov 02 08:49:08 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 02 08:49:08 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1541. Nov 02 08:49:08 managed-node3 systemd[1]: run-r1a3e4ed74f444fcc8d0c2266a43883dc.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r1a3e4ed74f444fcc8d0c2266a43883dc.service has successfully entered the 'dead' state. Nov 02 08:49:08 managed-node3 python3.12[15654]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:09 managed-node3 python3.12[15798]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['7500'] proto=tcp setype=tangd_port_t state=present local=True ignore_selinux_state=False reload=True Nov 02 08:49:11 managed-node3 kernel: SELinux: Converting 398 SID table entries... 
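The local_seport call above labels TCP port 7500 with the tangd_port_t SELinux type so that tangd may listen on a non-default port; the kernel "SELinux: Converting ... SID table entries" messages that follow are the policy reload it triggers, and the python3-libselinux/policycoreutils packages installed just before it are its prerequisites. Outside the role, roughly the same change can be made with a standalone task. A minimal sketch, assuming community.general.seport is available (CLI equivalent: semanage port -a -t tangd_port_t -p tcp 7500):

    - name: Allow tangd to bind TCP 7500 (equivalent of the local_seport call above)
      community.general.seport:
        ports: "7500"          # port shown in the invocation above
        proto: tcp
        setype: tangd_port_t
        state: present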
Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability open_perms=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability always_check_network=0 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Nov 02 08:49:11 managed-node3 kernel: SELinux: policy capability userspace_initial_context=0 Nov 02 08:49:12 managed-node3 python3.12[15915]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 02 08:49:15 managed-node3 python3.12[16028]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:16 managed-node3 python3.12[16143]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:49:16 managed-node3 python3.12[16256]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/tangd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 02 08:49:17 managed-node3 python3.12[16346]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1730551756.4544008-10995-180496254537908/.source.conf dest=/etc/systemd/system/tangd.socket.d/override.conf backup=True mode=0644 follow=False _original_basename=tangd_socket_override.conf.j2 checksum=cab519df8c21e60fd06ac780e2c7bd41ad441042 force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:49:17 managed-node3 python3.12[16459]: ansible-setup Invoked with gather_subset=['!all', '!min', 'python_version', 'service_mgr'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 02 08:49:18 managed-node3 python3.12[16575]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:18 managed-node3 python3.12[16688]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:19 managed-node3 python3.12[16801]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:19 managed-node3 python3.12[16860]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True 
security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:49:20 managed-node3 python3.12[16974]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 02 08:49:20 managed-node3 python3.12[17089]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:49:20 managed-node3 systemd[1]: Reload requested from client PID 17092 ('systemctl') (unit session-5.scope)... Nov 02 08:49:20 managed-node3 systemd[1]: Reloading... Nov 02 08:49:21 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted. Nov 02 08:49:21 managed-node3 systemd[1]: Reloading finished in 186 ms. Nov 02 08:49:21 managed-node3 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1618. Nov 02 08:49:21 managed-node3 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1618. Nov 02 08:49:21 managed-node3 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1701. Nov 02 08:49:22 managed-node3 polkitd[17242]: Started polkitd version 125 Nov 02 08:49:22 managed-node3 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1701. Nov 02 08:49:22 managed-node3 python3.12[17306]: ansible-fedora.linux_system_roles.firewall_lib Invoked with zone=public port=['7500/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 02 08:49:23 managed-node3 python3.12[17419]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None Nov 02 08:49:23 managed-node3 systemd[1]: Reload requested from client PID 17420 ('systemctl') (unit session-5.scope)... Nov 02 08:49:23 managed-node3 systemd[1]: Reloading... 
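The firewall_lib call above opens 7500/tcp in the public zone both permanently and for the running firewalld instance. The role delegates this to fedora.linux_system_roles.firewall; purely as an illustration of the effect (not the role's own code), the same opening could be expressed with ansible.posix.firewalld:

    - name: Open the tang port in firewalld (illustration of the step above)
      ansible.posix.firewalld:
        zone: public
        port: 7500/tcp
        permanent: true    # persist the rule, as permanent=True above
        immediate: true    # apply to the running daemon, as runtime=True above
        state: enabled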
Nov 02 08:49:23 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted. Nov 02 08:49:23 managed-node3 systemd[1]: Reloading finished in 191 ms. Nov 02 08:49:23 managed-node3 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 1779. Nov 02 08:49:23 managed-node3 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Nov 02 08:49:23 managed-node3 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 1779. Nov 02 08:49:24 managed-node3 python3.12[17583]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:49:24 managed-node3 systemd[1]: tangd.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit tangd.socket has successfully entered the 'dead' state. Nov 02 08:49:24 managed-node3 systemd[1]: Closed tangd.socket - Tang Server socket. ░░ Subject: A stop job for unit tangd.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit tangd.socket has finished. ░░ ░░ The job identifier is 1856 and the job result is done. Nov 02 08:49:24 managed-node3 systemd[1]: Stopping tangd.socket - Tang Server socket... ░░ Subject: A stop job for unit tangd.socket has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit tangd.socket has begun execution. ░░ ░░ The job identifier is 1856. Nov 02 08:49:24 managed-node3 systemd[1]: Starting tangd.socket - Tang Server socket... ░░ Subject: A start job for unit tangd.socket has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has begun execution. ░░ ░░ The job identifier is 1856. Nov 02 08:49:24 managed-node3 systemd[1]: Listening on tangd.socket - Tang Server socket. ░░ Subject: A start job for unit tangd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has finished successfully. ░░ ░░ The job identifier is 1856. 
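systemd warns "Socket unit configuration has changed while unit has been running" after each daemon-reload because the drop-in under tangd.socket.d changed, so the role restarts tangd.socket to pick up the new listener; that is the stop/start sequence logged above. The rendered override.conf is not reproduced in this log (only its checksum), but a port drop-in for a socket unit conventionally clears ListenStream and then sets the new port. A minimal sketch of that shape, assuming TCP 7500 as used throughout this run and that the tangd.socket.d directory already exists:

    - name: Illustrative port drop-in for tangd.socket (not the role's actual template)
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override.conf
        mode: "0644"
        content: |
          [Socket]
          ListenStream=
          ListenStream=7500

    - name: Restart the socket so the changed drop-in takes effect
      ansible.builtin.systemd:
        name: tangd.socket
        state: restarted
        enabled: true
        daemon_reload: true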
Nov 02 08:49:24 managed-node3 python3.12[17705]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:24 managed-node3 python3.12[17820]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:25 managed-node3 python3.12[17935]: ansible-slurp Invoked with path=/etc/systemd/system/tangd.socket.d/override.conf src=/etc/systemd/system/tangd.socket.d/override.conf Nov 02 08:49:25 managed-node3 python3.12[18048]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:49:26 managed-node3 python3.12[18161]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 02 08:49:26 managed-node3 python3.12[18220]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 02 08:49:27 managed-node3 python3.12[18334]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None Nov 02 08:49:28 managed-node3 python3.12[18447]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:28 managed-node3 python3.12[18562]: ansible-find Invoked with paths=['/etc/systemd/system/tangd.socket.d'] file_type=any hidden=True excludes=['^override.conf$'] use_regex=True patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False exact_mode=True contains=None age=None size=None depth=None mode=None encoding=None Nov 02 08:49:28 managed-node3 python3.12[18675]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 02 08:49:29 managed-node3 python3.12[18788]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None Nov 02 08:49:29 managed-node3 systemd[1]: Reload requested from client PID 18789 ('systemctl') (unit session-5.scope)... Nov 02 08:49:29 managed-node3 systemd[1]: Reloading... 
Nov 02 08:49:29 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted. Nov 02 08:49:29 managed-node3 systemd[1]: Reloading finished in 185 ms. Nov 02 08:49:30 managed-node3 python3.12[18948]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 02 08:49:30 managed-node3 systemd[1]: tangd.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit tangd.socket has successfully entered the 'dead' state. Nov 02 08:49:30 managed-node3 systemd[1]: Closed tangd.socket - Tang Server socket. ░░ Subject: A stop job for unit tangd.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit tangd.socket has finished. ░░ ░░ The job identifier is 1933 and the job result is done. Nov 02 08:49:30 managed-node3 systemd[1]: Stopping tangd.socket - Tang Server socket... ░░ Subject: A stop job for unit tangd.socket has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit tangd.socket has begun execution. ░░ ░░ The job identifier is 1933. Nov 02 08:49:30 managed-node3 systemd[1]: Starting tangd.socket - Tang Server socket... ░░ Subject: A start job for unit tangd.socket has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has begun execution. ░░ ░░ The job identifier is 1933. Nov 02 08:49:30 managed-node3 systemd[1]: Listening on tangd.socket - Tang Server socket. ░░ Subject: A start job for unit tangd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit tangd.socket has finished successfully. ░░ ░░ The job identifier is 1933. 
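This last sequence is what the test exists to verify: after the extra override2.conf has been deleted, the role's find (with excludes=['^override.conf$']) reports nothing else in tangd.socket.d, so the role removes the drop-in directory it no longer shares with other files and restarts tangd.socket. A minimal sketch of that decision, reusing the find parameters shown above; the register name __other_files is made up for this sketch and is not taken from the role:

    - name: Look for anything in tangd.socket.d other than the role's own override.conf
      ansible.builtin.find:
        paths: /etc/systemd/system/tangd.socket.d
        file_type: any
        hidden: true
        use_regex: true
        excludes:
          - ^override.conf$
      register: __other_files    # hypothetical name for this sketch

    - name: Remove the drop-in directory only when nothing else shares it
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: absent
      when: __other_files.matched == 0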
Nov 02 08:49:30 managed-node3 python3.12[19071]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 02 08:49:31 managed-node3 python3.12[19184]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Cleanup] ***************************************************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:100 Saturday 02 November 2024 08:49:31 -0400 (0:00:00.554) 0:00:37.366 ***** included: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml for managed-node3 TASK [Remove control node files/directories] *********************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:2 Saturday 02 November 2024 08:49:31 -0400 (0:00:00.063) 0:00:37.429 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_keys_dir | d(\"\") is match(\"^/\")", "skip_reason": "Conditional result was False" } TASK [Remove managed node files/directories] *********************************** task path: /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:9 Saturday 02 November 2024 08:49:31 -0400 (0:00:00.073) 0:00:37.503 ***** skipping: [managed-node3] => (item={'path': '', 'remove_dir': 'true'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item.path is match(\"^/\")", "item": { "path": "", "remove_dir": "true" }, "skip_reason": "Conditional result was False" } changed: [managed-node3] => (item={'path': '/var/db/tang', 'remove_dir': 'false'}) => { "ansible_loop_var": "item", "changed": true, "cmd": "set -euxo pipefail\nif [ \"false\" = true ]; then\n rm -rf \"/var/db/tang\"\nelse\n rm -rf \"/var/db/tang\"/* \"/var/db/tang\"/.* || :\nfi\n", "delta": "0:00:00.004713", "end": "2024-11-02 08:49:31.884832", "item": { "path": "/var/db/tang", "remove_dir": "false" }, "rc": 0, "start": "2024-11-02 08:49:31.880119" } STDERR: + '[' false = true ']' + rm -rf /var/db/tang/NQ054z0B3uSvyIdG2dyBtKLKV-F0lUjf6m5-u3zjQrI.jwk /var/db/tang/RECnQHSD-Nl0AIs4xnIa8lNPCvykG5mqMF9X3sbdkZo.jwk '/var/db/tang/.*' changed: [managed-node3] => (item={'path': '/var/cache/tang', 'remove_dir': 'false'}) => { "ansible_loop_var": "item", "changed": true, "cmd": "set -euxo pipefail\nif [ \"false\" = true ]; then\n rm -rf \"/var/cache/tang\"\nelse\n rm -rf \"/var/cache/tang\"/* \"/var/cache/tang\"/.* || :\nfi\n", "delta": "0:00:00.003807", "end": "2024-11-02 08:49:32.212807", "item": { "path": "/var/cache/tang", "remove_dir": "false" }, "rc": 0, "start": "2024-11-02 08:49:32.209000" } STDERR: + '[' false = true ']' + rm -rf '/var/cache/tang/*' '/var/cache/tang/.*' PLAY RECAP ********************************************************************* managed-node3 : ok=83 changed=13 unreachable=0 failed=0 skipped=55 rescued=0 ignored=0 Saturday 02 November 2024 08:49:32 -0400 (0:00:00.771) 0:00:38.274 ***** =============================================================================== fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.53s 
/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 2.64s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 1.43s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state --- 1.37s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.26s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.13s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.07s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.07s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.05s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 1.04s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Create a customization systemd file ------------------------------------- 0.89s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:20 fedora.linux_system_roles.selinux : Refresh facts ----------------------- 0.88s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role --- 0.87s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Remove managed node files/directories ----------------------------------- 0.77s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:9 fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect --- 0.73s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect --- 0.73s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Create the tangd.socket.d directory ------------------------------------- 0.71s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:14 fedora.linux_system_roles.firewall : Configure firewall ----------------- 0.70s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to --- 0.68s 
/tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state --- 0.60s /tmp/collections-9Uu/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39
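For reference, the scenario this run exercises (a tang server on a non-default port, with the SELinux labeling and firewall opening seen above handled by the role) could be driven by a playbook along these lines. The variable names are assumptions based on the role's documented interface rather than anything shown in this log:

    - name: Set up a tang server on TCP 7500
      hosts: all
      vars:
        nbde_server_port: 7500             # assumed variable name; 7500/tcp is the port labeled and opened above
        nbde_server_manage_firewall: true  # assumed; the firewall role is invoked in this run
        nbde_server_manage_selinux: true   # assumed; the selinux role is invoked in this run
      roles:
        - fedora.linux_system_roles.nbde_server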