# STDOUT: ---v---v---v---v---v--- ansible-playbook [core 2.17.2] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /opt/ansible-2.17/lib/python3.11/site-packages/ansible ansible collection location = /WORKDIR/git-weekly-cig48sz_fx/.collection executable location = /opt/ansible-2.17/bin/ansible-playbook python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.17/bin/python) jinja version = 3.1.4 libyaml = True Using /etc/ansible/ansible.cfg as config file Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_quadlet_demo.yml *********************************************** 2 plays in /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:4 Saturday 03 August 2024 16:42:33 +0000 (0:00:00.025) 0:00:00.025 ******* ok: [sut] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Deploy the quadlet demo app] ********************************************* TASK [Gathering Facts] ********************************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:9 Saturday 03 August 2024 16:42:33 +0000 (0:00:00.012) 0:00:00.037 ******* ok: [sut] TASK [Generate certificates] *************************************************** task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:39 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.915) 0:00:00.953 ******* included: fedora.linux_system_roles.certificate for sut TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.026) 0:00:00.979 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for sut TASK 
[fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.013) 0:00:00.993 ******* skipping: [sut] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.013) 0:00:01.006 ******* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.242) 0:00:01.248 ******* ok: [sut] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.015) 0:00:01.264 ******* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 03 August 2024 16:42:34 +0000 (0:00:00.025) 0:00:01.289 ******* changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: python3-pyasn1-0.5.1-4.el10.noarch", "Installed: python3-cffi-1.16.0-5.el10.x86_64", "Installed: python3-cryptography-41.0.7-2.el10.x86_64", "Installed: python3-ply-3.11-24.el10.noarch", "Installed: python3-pycparser-2.20-15.el10.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Saturday 03 August 2024 16:42:37 +0000 (0:00:02.713) 0:00:04.002 ******* changed: [sut] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nspr-4.35.0-27.el10.x86_64", "Installed: nss-3.101.0-6.el10.x86_64", "Installed: certmonger-0.79.20-2.el10.x86_64", "Installed: dbus-tools-1:1.14.10-4.el10.x86_64", "Installed: nss-softokn-3.101.0-6.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-6.el10.x86_64", "Installed: nss-sysinit-3.101.0-6.el10.x86_64", "Installed: python3-packaging-23.2-5.el10.noarch", "Installed: nss-util-3.101.0-6.el10.x86_64" ] } lsrpackages: certmonger python3-packaging TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Saturday 03 August 2024 16:42:40 +0000 (0:00:03.326) 0:00:07.328 ******* changed: [sut] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Saturday 03 August 2024 16:42:40 +0000 (0:00:00.275) 0:00:07.603 ******* changed: [sut] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 03 August 2024 16:42:40 +0000 (0:00:00.230) 0:00:07.833 ******* changed: [sut] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target syslog.target dbus.socket systemd-journald.socket system.slice sysinit.target dbus-broker.service network.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", 
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3700936704", "EffectiveMemoryMax": "3700936704", "EffectiveTasksMax": "22402", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", 
"LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14001", "LimitNPROCSoft": "14001", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14001", "LimitSIGPENDINGSoft": "14001", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3298357248", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", 
"StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22402", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 03 August 2024 16:42:41 +0000 (0:00:00.775) 0:00:08.608 ******* changed: [sut] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). 
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 03 August 2024 16:42:42 +0000 (0:00:00.708) 0:00:09.316 ******* ok: [sut] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRY2VuSXVrWFJTR0tnN2tPRzZ2ckNKakFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTnpGbApPV000WW1FdE5EVmtNVFE0TmpJdFlUQmxaVFF6T0RZdFpXRm1ZV015TWpVd0hoY05NalF3T0RBek1UWTBNalF5CldoY05NalV3T0RBek1UWTBNalF4V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzI4R0NHSmF2OE5HOGQyREdLUEgzKzhtU1EvWmlhMDJvNgozWGtkYW1adU1GbmZwK3hBVnd4dkZmdFMxR1pXY08wdERNZ0RCc0FSOSsweG8wMGlSOSsyM0lmVmVmSm44MEJwClMyVVgyVmo5akI1enV6aGtnSmk3dy9HMktGeTcxRG1tZzFkSkNKZk1paUFFVmsrS0pQbW1hbTdwV1BaR0RlanUKNTVuY21rY0tnVzltTWIxODNjakhGcU1yZ0EzQWY1WGprOGR1dnBkWGVrT05GM1BIK2FQdW1HV1hxS1Rsb3hwTQppdGpVVWVBd01Ga3lCQkRFZUg2VWNCS2s5SHJOdE5pcUxVZWQ5STNDanVIaFRHOGZyUzQwdTFKdWVjZGVqdkJJCnQ4KzY5QStyeHNKSnVtZkYyUW4rVGxuQTFzRlNZYStIQ0xQODlhcURZeElEaUowR2RLTGpBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVUycEFzCi90dFZ0RzdGYTJzaUxtWStzeWltaGhZd0h3WURWUjBqQkJnd0ZvQVVFN3ViWFVEaWlzcHdpOG5scVZxNG5Mem0KL2Q0d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFGQXhjY0VkdUxzRVUxVDliQ2tSSFhqY2k0SjdURjYrOUFCcQpKbXY2bWZmWEdoSUM0NW8vVXFRRFp0RnhXQzJTWEVzOWp3VmZjOWhrQUFEeE80YXdmdVQrSm9LYzFQdmFXR043CjkveWxJSkgyV2E2UnhvS2hvYWQ2UGpXcW0rd0s0Y2JGTWxqeWdGRy9Hc1hWWFBUNE9veVUreHhaNjlURDZ4MVkKUE5UZDZKdTByOVZzandYVk85UldockRyV2ZtWVQrU2lOT3ZiUFNIeld4OVBHdFFxYmVCQzI2YkxRbzN4aXZsMwprT2s0eXBXVGpSSnpqN2lkVFJTMFdtemlQN3Fncm5DNTBkUndNWWJqMzJxTDVER29zSjE2eStQblhQcGNDT0hPCklYYU9XdkhUcVRQQ2JxSG1zYUhsdEFDSkNDaWRuSWtaZUM4USszVFhlTWloVjduR1MyQT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [sut] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQzI4R0NHSmF2OE5HOGQKMkRHS1BIMys4bVNRL1ppYTAybzYzWGtkYW1adU1GbmZwK3hBVnd4dkZmdFMxR1pXY08wdERNZ0RCc0FSOSsweApvMDBpUjkrMjNJZlZlZkpuODBCcFMyVVgyVmo5akI1enV6aGtnSmk3dy9HMktGeTcxRG1tZzFkSkNKZk1paUFFClZrK0tKUG1tYW03cFdQWkdEZWp1NTVuY21rY0tnVzltTWIxODNjakhGcU1yZ0EzQWY1WGprOGR1dnBkWGVrT04KRjNQSCthUHVtR1dYcUtUbG94cE1pdGpVVWVBd01Ga3lCQkRFZUg2VWNCS2s5SHJOdE5pcUxVZWQ5STNDanVIaApURzhmclM0MHUxSnVlY2RlanZCSXQ4KzY5QStyeHNKSnVtZkYyUW4rVGxuQTFzRlNZYStIQ0xQODlhcURZeElECmlKMEdkS0xqQWdNQkFBRUNnZ0VBREx6UVNRdzBIVDQ4dUdiQkRaOTlwRjZWNnY4di9NNXQrSnIxZFJDMHEyL0UKRk9NT2U1ZTBDVzRONUVGZXZoNWtjcEpsRmgrR1hjU1YvWWYzSUcvQnRNRkdnZkl1UGRlb1FLSHhHdDBxMUQzegozQzllZmZ6eTVZRlVyS1Z0empRNDJXTzVhWi9tdGVWWTFRUjZJRTV5Y0RNUlN1cHQvTE1PUGgySkpJZVNIUTh5CkphZVpvZ3VPREwxNjVBS1pBbGFFeGVBNFRXem53dlpRVi84aXlxd251ZXVIQW0ydVAwdWFGejFGMTFraVJ6L2kKbGFIa053ZkNtcWxHUklyTi9WZjViUjkwbDd1NTBhMXIvUU9GQWVVaWhRYUpUNVVNSnc0dTJHRWtPOE9sSUllTgo0SGVaVStsSlJVb3hwM2VmQTltOFR1ODk1MmluRkdZOVJQcU5WTkwxaVFLQmdRRHZoc3NwUTN4WVJuNXBPN2ZqCmg1SVJRcEVUbW15SitIaUlVMVd2T3J2eDZzeTF4eEFmV3k3NWFOaUwrZVV1ZTNLdmVHbHY5S0ttcFRsZkFXMVAKWndOaVRZMTJrdittUGpQV0d2dVVWY0trSGhFc0Jrb2p6L0JSZzREMUdXUE1YeHZCekRsd2xxU2pxdjhzZkUvRApDMUZnaXo3TGl5YzdhM0gvaGVZenovWnQ2UUtCZ1FERGhVZFZSUzN1M1E2eUhhQnhyakpsSCtUREtqcG5nOGxOCjRxSHhMb2tWSnErVEYraWY1Q0Y4K0ZjUXd3Z2RzR1NvZUpwQTNUbnJ1N2dyUkQ2OXFKTzBRVURoaEVSMEJON1UKQmRtb0hZSTBlQVFnakExYmJIUW9NbEtud0o5WjFVWW4wMUpSeE5SK1Y0YzVxcGtESFA0SGZkSFBBMGlSUE5hbwpYMEdVejRNTzZ3S0JnR1cvTFlmT3VnQkptYU1jRWdZZDdhZFZUb2swUE16eTlraDZsZGhFcTllY3RPNjJxNXVXClFVTUREWkdRU3BGUE1TMW1pZXRiME1mWFFhS2cyWTAwSmhUWStyR1d5ZkFWVmpHcDdwcFpQOEExWVhnQkIvZGsKZWlJMktVNTBSWmRvQTBOMGZDWGVaL3Q2Uk1EaDRZUHB4OXlwNmJmTzJaQjlHTVRCWFVNL0lPQVpBb0dBV2k4ZQpvUWxBejUydFNxRDV3WlhjN3VJUUxhbEx1VFdEWStaMlZmM21BeGlPQWFZVWliTzdjcDVGNHBoakpQYVk1MUNCCnJndWN6TXdRZmZ5RGlBMjYyM2tCeVVPdUkybWZzbUprYS9QNGUrVWoyak5mY2QwTGt3THJOeTlWRVNXYnI0RDgKeG5Tb293SVNnSm5jN0IreVdKaERiMTRQV2RhYkdKeWZibGx0RlRNQ2dZRUFoM3l4RU5VUkcrV3N6Yy9ZZVlXOAp3bGZzN3Ricmk4MFJoVzk3eHMyN092cE5zZkVWUnRzQTUzMHY2dnJsTjRtUGMwU1Z1YzRLZWVCUkpWL09IM0w3CjJNQVR0ZDJzL1V5T3RPOW1VVmswa0VxTVR4ZjVLMnpOc0dybG1LRktOK1ZSV2U2dHp2dUVvajVPVjBXemZxTE4KbWFibitMWkN2UHhWTXVNUmNrYzBLMVk9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [sut] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRY2VuSXVrWFJTR0tnN2tPRzZ2ckNKakFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTnpGbApPV000WW1FdE5EVmtNVFE0TmpJdFlUQmxaVFF6T0RZdFpXRm1ZV015TWpVd0hoY05NalF3T0RBek1UWTBNalF5CldoY05NalV3T0RBek1UWTBNalF4V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQzI4R0NHSmF2OE5HOGQyREdLUEgzKzhtU1EvWmlhMDJvNgozWGtkYW1adU1GbmZwK3hBVnd4dkZmdFMxR1pXY08wdERNZ0RCc0FSOSsweG8wMGlSOSsyM0lmVmVmSm44MEJwClMyVVgyVmo5akI1enV6aGtnSmk3dy9HMktGeTcxRG1tZzFkSkNKZk1paUFFVmsrS0pQbW1hbTdwV1BaR0RlanUKNTVuY21rY0tnVzltTWIxODNjakhGcU1yZ0EzQWY1WGprOGR1dnBkWGVrT05GM1BIK2FQdW1HV1hxS1Rsb3hwTQppdGpVVWVBd01Ga3lCQkRFZUg2VWNCS2s5SHJOdE5pcUxVZWQ5STNDanVIaFRHOGZyUzQwdTFKdWVjZGVqdkJJCnQ4KzY5QStyeHNKSnVtZkYyUW4rVGxuQTFzRlNZYStIQ0xQODlhcURZeElEaUowR2RLTGpBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVUycEFzCi90dFZ0RzdGYTJzaUxtWStzeWltaGhZd0h3WURWUjBqQkJnd0ZvQVVFN3ViWFVEaWlzcHdpOG5scVZxNG5Mem0KL2Q0d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFGQXhjY0VkdUxzRVUxVDliQ2tSSFhqY2k0SjdURjYrOUFCcQpKbXY2bWZmWEdoSUM0NW8vVXFRRFp0RnhXQzJTWEVzOWp3VmZjOWhrQUFEeE80YXdmdVQrSm9LYzFQdmFXR043CjkveWxJSkgyV2E2UnhvS2hvYWQ2UGpXcW0rd0s0Y2JGTWxqeWdGRy9Hc1hWWFBUNE9veVUreHhaNjlURDZ4MVkKUE5UZDZKdTByOVZzandYVk85UldockRyV2ZtWVQrU2lOT3ZiUFNIeld4OVBHdFFxYmVCQzI2YkxRbzN4aXZsMwprT2s0eXBXVGpSSnpqN2lkVFJTMFdtemlQN3Fncm5DNTBkUndNWWJqMzJxTDVER29zSjE2eStQblhQcGNDT0hPCklYYU9XdkhUcVRQQ2JxSG1zYUhsdEFDSkNDaWRuSWtaZUM4USszVFhlTWloVjduR1MyQT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 03 August 2024 16:42:43 +0000 (0:00:00.616) 0:00:09.933 ******* ok: [sut] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC28GCGJav8NG8d\n2DGKPH3+8mSQ/Zia02o63XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0x\no00iR9+23IfVefJn80BpS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAE\nVk+KJPmmam7pWPZGDeju55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekON\nF3PH+aPumGWXqKTloxpMitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHh\nTG8frS40u1JuecdejvBIt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxID\niJ0GdKLjAgMBAAECggEADLzQSQw0HT48uGbBDZ99pF6V6v8v/M5t+Jr1dRC0q2/E\nFOMOe5e0CW4N5EFevh5kcpJlFh+GXcSV/Yf3IG/BtMFGgfIuPdeoQKHxGt0q1D3z\n3C9effzy5YFUrKVtzjQ42WO5aZ/mteVY1QR6IE5ycDMRSupt/LMOPh2JJIeSHQ8y\nJaeZoguODL165AKZAlaExeA4TWznwvZQV/8iyqwnueuHAm2uP0uaFz1F11kiRz/i\nlaHkNwfCmqlGRIrN/Vf5bR90l7u50a1r/QOFAeUihQaJT5UMJw4u2GEkO8OlIIeN\n4HeZU+lJRUoxp3efA9m8Tu8952inFGY9RPqNVNL1iQKBgQDvhsspQ3xYRn5pO7fj\nh5IRQpETmmyJ+HiIU1WvOrvx6sy1xxAfWy75aNiL+eUue3KveGlv9KKmpTlfAW1P\nZwNiTY12kv+mPjPWGvuUVcKkHhEsBkojz/BRg4D1GWPMXxvBzDlwlqSjqv8sfE/D\nC1Fgiz7Liyc7a3H/heYzz/Zt6QKBgQDDhUdVRS3u3Q6yHaBxrjJlH+TDKjpng8lN\n4qHxLokVJq+TF+if5CF8+FcQwwgdsGSoeJpA3Tnru7grRD69qJO0QUDhhER0BN7U\nBdmoHYI0eAQgjA1bbHQoMlKnwJ9Z1UYn01JRxNR+V4c5qpkDHP4HfdHPA0iRPNao\nX0GUz4MO6wKBgGW/LYfOugBJmaMcEgYd7adVTok0PMzy9kh6ldhEq9ectO62q5uW\nQUMDDZGQSpFPMS1mietb0MfXQaKg2Y00JhTY+rGWyfAVVjGp7ppZP8A1YXgBB/dk\neiI2KU50RZdoA0N0fCXeZ/t6RMDh4YPpx9yp6bfO2ZB9GMTBXUM/IOAZAoGAWi8e\noQlAz52tSqD5wZXc7uIQLalLuTWDY+Z2Vf3mAxiOAaYUibO7cp5F4phjJPaY51CB\nrguczMwQffyDiA2623kByUOuI2mfsmJka/P4e+Uj2jNfcd0LkwLrNy9VESWbr4D8\nxnSoowISgJnc7B+yWJhDb14PWdabGJyfblltFTMCgYEAh3yxENURG+Wszc/YeYW8\nwlfs7tbri80RhW97xs27OvpNsfEVRtsA530v6vrlN4mPc0SVuc4KeeBRJV/OH3L7\n2MATtd2s/UyOtO9mUVk0kEqMTxf5K2zNsGrlmKFKN+VRWe6tzvuEoj5OV0WzfqLN\nmabn+LZCvPxVMuMRckc0K1Y=\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 03 August 2024 16:42:43 +0000 (0:00:00.021) 0:00:09.954 ******* ok: [sut] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC28GCGJav8NG8d\n2DGKPH3+8mSQ/Zia02o63XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0x\no00iR9+23IfVefJn80BpS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAE\nVk+KJPmmam7pWPZGDeju55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekON\nF3PH+aPumGWXqKTloxpMitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHh\nTG8frS40u1JuecdejvBIt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxID\niJ0GdKLjAgMBAAECggEADLzQSQw0HT48uGbBDZ99pF6V6v8v/M5t+Jr1dRC0q2/E\nFOMOe5e0CW4N5EFevh5kcpJlFh+GXcSV/Yf3IG/BtMFGgfIuPdeoQKHxGt0q1D3z\n3C9effzy5YFUrKVtzjQ42WO5aZ/mteVY1QR6IE5ycDMRSupt/LMOPh2JJIeSHQ8y\nJaeZoguODL165AKZAlaExeA4TWznwvZQV/8iyqwnueuHAm2uP0uaFz1F11kiRz/i\nlaHkNwfCmqlGRIrN/Vf5bR90l7u50a1r/QOFAeUihQaJT5UMJw4u2GEkO8OlIIeN\n4HeZU+lJRUoxp3efA9m8Tu8952inFGY9RPqNVNL1iQKBgQDvhsspQ3xYRn5pO7fj\nh5IRQpETmmyJ+HiIU1WvOrvx6sy1xxAfWy75aNiL+eUue3KveGlv9KKmpTlfAW1P\nZwNiTY12kv+mPjPWGvuUVcKkHhEsBkojz/BRg4D1GWPMXxvBzDlwlqSjqv8sfE/D\nC1Fgiz7Liyc7a3H/heYzz/Zt6QKBgQDDhUdVRS3u3Q6yHaBxrjJlH+TDKjpng8lN\n4qHxLokVJq+TF+if5CF8+FcQwwgdsGSoeJpA3Tnru7grRD69qJO0QUDhhER0BN7U\nBdmoHYI0eAQgjA1bbHQoMlKnwJ9Z1UYn01JRxNR+V4c5qpkDHP4HfdHPA0iRPNao\nX0GUz4MO6wKBgGW/LYfOugBJmaMcEgYd7adVTok0PMzy9kh6ldhEq9ectO62q5uW\nQUMDDZGQSpFPMS1mietb0MfXQaKg2Y00JhTY+rGWyfAVVjGp7ppZP8A1YXgBB/dk\neiI2KU50RZdoA0N0fCXeZ/t6RMDh4YPpx9yp6bfO2ZB9GMTBXUM/IOAZAoGAWi8e\noQlAz52tSqD5wZXc7uIQLalLuTWDY+Z2Vf3mAxiOAaYUibO7cp5F4phjJPaY51CB\nrguczMwQffyDiA2623kByUOuI2mfsmJka/P4e+Uj2jNfcd0LkwLrNy9VESWbr4D8\nxnSoowISgJnc7B+yWJhDb14PWdabGJyfblltFTMCgYEAh3yxENURG+Wszc/YeYW8\nwlfs7tbri80RhW97xs27OvpNsfEVRtsA530v6vrlN4mPc0SVuc4KeeBRJV/OH3L7\n2MATtd2s/UyOtO9mUVk0kEqMTxf5K2zNsGrlmKFKN+VRWe6tzvuEoj5OV0WzfqLN\nmabn+LZCvPxVMuMRckc0K1Y=\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.025318", "end": "2024-08-03 16:42:43.361110", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQcenIukXRSGKg7kOG6vrCJjANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzFl\nOWM4YmEtNDVkMTQ4NjItYTBlZTQzODYtZWFmYWMyMjUwHhcNMjQwODAzMTY0MjQy\nWhcNMjUwODAzMTY0MjQxWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC28GCGJav8NG8d2DGKPH3+8mSQ/Zia02o6\n3XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0xo00iR9+23IfVefJn80Bp\nS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAEVk+KJPmmam7pWPZGDeju\n55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekONF3PH+aPumGWXqKTloxpM\nitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHhTG8frS40u1JuecdejvBI\nt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxIDiJ0GdKLjAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU2pAs\n/ttVtG7Fa2siLmY+syimhhYwHwYDVR0jBBgwFoAUE7ubXUDiispwi8nlqVq4nLzm\n/d4wDQYJKoZIhvcNAQELBQADggEBAFAxccEduLsEU1T9bCkRHXjci4J7TF6+9ABq\nJmv6mffXGhIC45o/UqQDZtFxWC2SXEs9jwVfc9hkAADxO4awfuT+JoKc1PvaWGN7\n9/ylIJH2Wa6RxoKhoad6PjWqm+wK4cbFMljygFG/GsXVXPT4OoyU+xxZ69TD6x1Y\nPNTd6Ju0r9VsjwXVO9RWhrDrWfmYT+SiNOvbPSHzWx9PGtQqbeBC26bLQo3xivl3\nkOk4ypWTjRJzj7idTRS0WmziP7qgrnC50dRwMYbj32qL5DGosJ16y+PnXPpcCOHO\nIXaOWvHTqTPCbqHmsaHltACJCCidnIkZeC8Q+3TXeMihV7nGS2A=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC28GCGJav8NG8d\n2DGKPH3+8mSQ/Zia02o63XkdamZuMFnfp+xAVwxvFftS1GZWcO0tDMgDBsAR9+0x\no00iR9+23IfVefJn80BpS2UX2Vj9jB5zuzhkgJi7w/G2KFy71Dmmg1dJCJfMiiAE\nVk+KJPmmam7pWPZGDeju55ncmkcKgW9mMb183cjHFqMrgA3Af5Xjk8duvpdXekON\nF3PH+aPumGWXqKTloxpMitjUUeAwMFkyBBDEeH6UcBKk9HrNtNiqLUed9I3CjuHh\nTG8frS40u1JuecdejvBIt8+69A+rxsJJumfF2Qn+TlnA1sFSYa+HCLP89aqDYxID\niJ0GdKLjAgMBAAECggEADLzQSQw0HT48uGbBDZ99pF6V6v8v/M5t+Jr1dRC0q2/E\nFOMOe5e0CW4N5EFevh5kcpJlFh+GXcSV/Yf3IG/BtMFGgfIuPdeoQKHxGt0q1D3z\n3C9effzy5YFUrKVtzjQ42WO5aZ/mteVY1QR6IE5ycDMRSupt/LMOPh2JJIeSHQ8y\nJaeZoguODL165AKZAlaExeA4TWznwvZQV/8iyqwnueuHAm2uP0uaFz1F11kiRz/i\nlaHkNwfCmqlGRIrN/Vf5bR90l7u50a1r/QOFAeUihQaJT5UMJw4u2GEkO8OlIIeN\n4HeZU+lJRUoxp3efA9m8Tu8952inFGY9RPqNVNL1iQKBgQDvhsspQ3xYRn5pO7fj\nh5IRQpETmmyJ+HiIU1WvOrvx6sy1xxAfWy75aNiL+eUue3KveGlv9KKmpTlfAW1P\nZwNiTY12kv+mPjPWGvuUVcKkHhEsBkojz/BRg4D1GWPMXxvBzDlwlqSjqv8sfE/D\nC1Fgiz7Liyc7a3H/heYzz/Zt6QKBgQDDhUdVRS3u3Q6yHaBxrjJlH+TDKjpng8lN\n4qHxLokVJq+TF+if5CF8+FcQwwgdsGSoeJpA3Tnru7grRD69qJO0QUDhhER0BN7U\nBdmoHYI0eAQgjA1bbHQoMlKnwJ9Z1UYn01JRxNR+V4c5qpkDHP4HfdHPA0iRPNao\nX0GUz4MO6wKBgGW/LYfOugBJmaMcEgYd7adVTok0PMzy9kh6ldhEq9ectO62q5uW\nQUMDDZGQSpFPMS1mietb0MfXQaKg2Y00JhTY+rGWyfAVVjGp7ppZP8A1YXgBB/dk\neiI2KU50RZdoA0N0fCXeZ/t6RMDh4YPpx9yp6bfO2ZB9GMTBXUM/IOAZAoGAWi8e\noQlAz52tSqD5wZXc7uIQLalLuTWDY+Z2Vf3mAxiOAaYUibO7cp5F4phjJPaY51CB\nrguczMwQffyDiA2623kByUOuI2mfsmJka/P4e+Uj2jNfcd0LkwLrNy9VESWbr4D8\nxnSoowISgJnc7B+yWJhDb14PWdabGJyfblltFTMCgYEAh3yxENURG+Wszc/YeYW8\nwlfs7tbri80RhW97xs27OvpNsfEVRtsA530v6vrlN4mPc0SVuc4KeeBRJV/OH3L7\n2MATtd2s/UyOtO9mUVk0kEqMTxf5K2zNsGrlmKFKN+VRWe6tzvuEoj5OV0WzfqLN\nmabn+LZCvPxVMuMRckc0K1Y=\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2024-08-03 16:42:43.335792" } STDOUT: Request "20240803164242" removed. 
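Note: the tasks around this point are the role's test-mode cleanup: it stops certmonger tracking for the issued certificate (the getcert stop-tracking call logged above) and then removes the generated files (logged below). A rough, illustrative equivalent of that cleanup, not the role's actual task source, would be:

```yaml
# Illustration only: approximate manual equivalent of the cleanup logged here.
- name: Stop certmonger tracking for the test certificate
  ansible.builtin.command:
    cmd: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt
  changed_when: false

- name: Remove the generated certificate and key
  ansible.builtin.file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key
```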
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Saturday 03 August 2024 16:42:43 +0000 (0:00:00.287) 0:00:10.241 ******* changed: [sut] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [sut] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [sut] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:50 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.671) 0:00:10.912 ******* included: fedora.linux_system_roles.podman for sut TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.034) 0:00:10.947 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.016) 0:00:10.963 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.014) 0:00:10.978 ******* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.203) 0:00:11.182 ******* ok: [sut] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:20 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.016) 0:00:11.198 ******* ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [sut] => (item=CentOS.yml) => { 
"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 03 August 2024 16:42:44 +0000 (0:00:00.028) 0:00:11.227 ******* ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 03 August 2024 16:42:45 +0000 (0:00:00.678) 0:00:11.906 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 03 August 2024 16:42:45 +0000 (0:00:00.021) 0:00:11.928 ******* changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: conmon-2:2.1.12-2.el10.x86_64", "Installed: aardvark-dns-2:1.11.0-4.el10.x86_64", "Installed: podman-5:5.1.2-2.el10.x86_64", "Installed: shadow-utils-subid-2:4.15.0-3.el10.x86_64", "Installed: container-selinux-3:2.232.1-3.el10.noarch", "Installed: containers-common-5:0.57.3-4.el10.noarch", "Installed: containers-common-extra-5:0.57.3-4.el10.noarch", "Installed: netavark-2:1.11.0-4.el10.x86_64", "Installed: passt-0^20240624.g1ee2eca-1.el10.x86_64", "Installed: passt-selinux-0^20240624.g1ee2eca-1.el10.noarch", "Installed: criu-3.19-5.el10.x86_64", "Installed: criu-libs-3.19-5.el10.x86_64", "Installed: crun-1.15-4.el10.x86_64", "Installed: fuse-overlayfs-1.13-2.el10.x86_64", "Installed: fuse3-3.16.2-5.el10.x86_64", "Installed: libnet-1.3-4.el10.x86_64", "Installed: gvisor-tap-vsock-6:0.7.3-4.el10.x86_64", "Installed: gvisor-tap-vsock-gvforwarder-6:0.7.3-4.el10.x86_64", "Installed: catatonit-5:0.1.7-7.el10.x86_64", "Installed: fuse-common-3.16.2-5.el10.x86_64" ] } lsrpackages: iptables-nft podman shadow-utils-subid TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:22 Saturday 03 August 2024 16:43:55 +0000 (0:01:10.793) 0:01:22.721 ******* ok: [sut] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031738", "end": "2024-08-03 16:43:56.061866", "rc": 0, "start": 
"2024-08-03 16:43:56.030128" } STDOUT: podman version 5.1.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.227) 0:01:22.949 ******* ok: [sut] => { "ansible_facts": { "podman_version": "5.1.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:32 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.019) 0:01:22.968 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:39 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.017) 0:01:22.986 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:49 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.042) 0:01:23.029 ******* META: end_host conditional evaluated to False, continuing execution for sut skipping: [sut] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for sut" } MSG: end_host conditional evaluated to false, continuing execution for sut TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.040) 0:01:23.069 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for sut TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.037) 0:01:23.106 ******* ok: [sut] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.287) 0:01:23.394 ******* skipping: [sut] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.020) 0:01:23.415 ******* ok: [sut] => { 
"ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get group information] **************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.024) 0:01:23.440 ******* ok: [sut] => { "ansible_facts": { "getent_group": { "root": [ "x", "0", "" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Set group name] *********************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.201) 0:01:23.642 ******* ok: [sut] => { "ansible_facts": { "__podman_group_name": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 03 August 2024 16:43:56 +0000 (0:00:00.024) 0:01:23.666 ******* ok: [sut] => { "changed": false, "stat": { "atime": 1719187200.0, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "86395ad7ce62834c967dc50f963a68f042029188", "ctime": 1722703416.657093, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 4660166, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1719187200.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "3584316354", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.203) 0:01:23.869 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check group with getsubids] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.018) 0:01:23.887 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:23.905 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:23.922 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:23.940 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.018) 0:01:23.958 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:23.975 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] ***** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:23.993 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:62 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.010 ******* ok: [sut] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:71 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.036) 0:01:24.046 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for sut TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.033) 0:01:24.079 ******* skipping: [sut] => { 
"changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.097 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:74 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.114 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for sut TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.033) 0:01:24.147 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.016) 0:01:24.163 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:77 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.041) 0:01:24.205 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for sut TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.034) 0:01:24.240 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.257 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.016) 0:01:24.274 ******* included: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for sut TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.036) 0:01:24.310 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.328 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.016) 0:01:24.344 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.017) 0:01:24.362 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:86 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.016) 0:01:24.379 ******* included: fedora.linux_system_roles.firewall for sut TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.061) 0:01:24.440 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for sut TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.031) 0:01:24.472 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.021) 0:01:24.493 ******* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag 
to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.197) 0:01:24.691 ******* ok: [sut] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 03 August 2024 16:43:57 +0000 (0:00:00.020) 0:01:24.712 ******* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.195) 0:01:24.907 ******* ok: [sut] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.021) 0:01:24.929 ******* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.634) 0:01:25.563 ******* skipping: [sut] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.018) 0:01:25.582 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.018) 0:01:25.600 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.016) 0:01:25.617 ******* skipping: [sut] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.016) 0:01:25.633 ******* skipping: [sut] => (item=nftables) => { 
"ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 03 August 2024 16:43:58 +0000 (0:00:00.019) 0:01:25.653 ******* ok: [sut] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice sysinit.target polkit.service dbus.socket basic.target dbus-broker.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service shutdown.target ipset.service ip6tables.service iptables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3700936704", "EffectiveMemoryMax": "3700936704", "EffectiveTasksMax": "22402", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP 
$MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14001", "LimitNPROCSoft": "14001", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14001", "LimitSIGPENDINGSoft": "14001", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3201249280", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": 
"yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22402", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK 
[fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 03 August 2024 16:43:59 +0000 (0:00:00.355) 0:01:26.008 ******* changed: [sut] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service dbus.socket sysinit.target basic.target system.slice dbus-broker.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ipset.service ip6tables.service iptables.service shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3700936704", "EffectiveMemoryMax": "3700936704", "EffectiveTasksMax": "22402", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 
}", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14001", "LimitNPROCSoft": "14001", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14001", "LimitSIGPENDINGSoft": "14001", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3200487424", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", 
"PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22402", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 03 August 2024 16:44:00 +0000 (0:00:01.454) 0:01:27.463 ******* ok: [sut] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 03 August 2024 16:44:00 +0000 (0:00:00.024) 0:01:27.487 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 03 August 2024 16:44:00 +0000 (0:00:00.017) 0:01:27.504 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 03 August 2024 16:44:00 +0000 (0:00:00.016) 0:01:27.520 ******* failed: [sut] (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } MSG: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' failed: [sut] (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } MSG: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' TASK [Dump journal] ************************************************************ task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:130 Saturday 03 August 2024 16:44:01 +0000 (0:00:01.104) 0:01:28.625 ******* fatal: [sut]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.027621", "end": "2024-08-03 16:44:01.961884", "failed_when_result": true, "rc": 0, "start": "2024-08-03 16:44:01.934263" } STDOUT: Aug 03 16:38:29 localhost systemd[1]: Starting systemd-udev-trigger.service - Coldplug All udev Devices... Aug 03 16:38:29 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs. Aug 03 16:38:29 localhost systemd[1]: Finished kmod-static-nodes.service - Create List of Static Device Nodes. Aug 03 16:38:29 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. Aug 03 16:38:29 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully. Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@efi_pstore.service - Load Kernel Module efi_pstore. Aug 03 16:38:29 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully... Aug 03 16:38:29 localhost kernel: loop: module loaded Aug 03 16:38:29 localhost systemd[1]: Mounted dev-hugepages.mount - Huge Pages File System. Aug 03 16:38:29 localhost systemd[1]: Mounted dev-mqueue.mount - POSIX Message Queue File System. Aug 03 16:38:29 localhost systemd[1]: Mounted sys-kernel-debug.mount - Kernel Debug File System. Aug 03 16:38:29 localhost systemd[1]: Mounted sys-kernel-tracing.mount - Kernel Trace File System. Aug 03 16:38:29 localhost systemd[1]: modprobe@loop.service: Deactivated successfully. 
Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@loop.service - Load Kernel Module loop. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-remount-fs.service - Remount Root and Kernel File Systems. Aug 03 16:38:29 localhost systemd[1]: systemd-hwdb-update.service - Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). Aug 03 16:38:29 localhost systemd[1]: systemd-pstore.service - Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore). Aug 03 16:38:29 localhost kernel: fuse: init (API version 7.40) Aug 03 16:38:29 localhost kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. Aug 03 16:38:29 localhost systemd-journald[476]: Collecting audit messages is disabled. Aug 03 16:38:29 localhost systemd[1]: Starting systemd-random-seed.service - Load/Save OS Random Seed... Aug 03 16:38:29 localhost kernel: device-mapper: uevent: version 1.0.3 Aug 03 16:38:29 localhost systemd[1]: systemd-tpm2-setup.service - TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Aug 03 16:38:29 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. Aug 03 16:38:29 localhost kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@lists.linux.dev Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse. Aug 03 16:38:29 localhost systemd[1]: modprobe@dm_mod.service: Deactivated successfully. Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@dm_mod.service - Load Kernel Module dm_mod. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-network-generator.service - Generate network units from Kernel command line. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-udev-load-credentials.service - Load udev Rules from Credentials. Aug 03 16:38:29 localhost systemd[1]: systemd-repart.service - Repartition Root Disk was skipped because no trigger condition checks were met. Aug 03 16:38:29 localhost systemd-journald[476]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. Aug 03 16:38:29 localhost systemd-journald[476]: Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.8M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Aug 03 16:38:29 localhost systemd[1]: Queued start job for default target multi-user.target. Aug 03 16:38:29 localhost systemd[1]: systemd-journald.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. Aug 03 16:38:29 localhost systemd[1]: Started systemd-journald.service - Journal Service. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-sysctl.service - Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 160. Aug 03 16:38:29 localhost systemd[1]: Starting systemd-journal-flush.service - Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 195. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-udev-trigger.service - Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 175. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-random-seed.service - Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 193. Aug 03 16:38:29 localhost systemd-journald[476]: Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.8M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Aug 03 16:38:30 localhost systemd-journald[476]: Received client request to flush runtime journal. Aug 03 16:38:30 localhost systemd[1]: Finished systemd-journal-flush.service - Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 195. Aug 03 16:38:30 localhost systemd[1]: Mounting sys-fs-fuse-connections.mount - FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 158. 
Aug 03 16:38:30 localhost systemd[1]: Mounted sys-fs-fuse-connections.mount - FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 158. Aug 03 16:38:30 localhost systemd[1]: Finished lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling. ░░ Subject: A start job for unit lvm2-monitor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-monitor.service has finished successfully. ░░ ░░ The job identifier is 147. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully. ░░ ░░ The job identifier is 131. Aug 03 16:38:31 localhost systemd[1]: systemd-sysusers.service - Create System Users was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysusers.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysusers.service has finished successfully. ░░ ░░ The job identifier is 137. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 128. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 128. Aug 03 16:38:31 localhost systemd[1]: Reached target local-fs-pre.target - Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 129. Aug 03 16:38:31 localhost systemd[1]: Reached target local-fs.target - Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 138. Aug 03 16:38:31 localhost systemd[1]: Listening on systemd-bootctl.socket - Boot Entries Service Socket. ░░ Subject: A start job for unit systemd-bootctl.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-bootctl.socket has finished successfully. ░░ ░░ The job identifier is 220. 
Aug 03 16:38:31 localhost systemd[1]: Listening on systemd-sysext.socket - System Extension Image Management. ░░ Subject: A start job for unit systemd-sysext.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.socket has finished successfully. ░░ ░░ The job identifier is 212. Aug 03 16:38:31 localhost systemd[1]: ldconfig.service - Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 167. Aug 03 16:38:31 localhost systemd[1]: selinux-autorelabel-mark.service - Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 161. Aug 03 16:38:31 localhost systemd[1]: systemd-binfmt.service - Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 188. Aug 03 16:38:31 localhost systemd[1]: systemd-boot-random-seed.service - Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 136. Aug 03 16:38:31 localhost systemd[1]: systemd-confext.service - Merge System Configuration Images into /etc/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-confext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-confext.service has finished successfully. ░░ ░░ The job identifier is 177. Aug 03 16:38:31 localhost systemd[1]: systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.service has finished successfully. ░░ ░░ The job identifier is 191. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-tmpfiles-setup.service - Create System Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 149. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-udevd.service - Rule-based Manager for Device Events and Files... 
░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 141. Aug 03 16:38:31 localhost systemd-udevd[517]: Using default interface naming scheme 'rhel-10.0'. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup.service - Create System Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 149. Aug 03 16:38:31 localhost systemd[1]: Mounting var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 262. Aug 03 16:38:31 localhost systemd[1]: Starting audit-rules.service - Load Audit Rules... ░░ Subject: A start job for unit audit-rules.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has begun execution. ░░ ░░ The job identifier is 231. Aug 03 16:38:31 localhost systemd[1]: systemd-firstboot.service - First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit systemd-firstboot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-firstboot.service has finished successfully. ░░ ░░ The job identifier is 133. Aug 03 16:38:31 localhost systemd[1]: first-boot-complete.target - First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 134. Aug 03 16:38:31 localhost systemd[1]: systemd-journal-catalog-update.service - Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 163. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-machine-id-commit.service - Save Transient machine-id to Disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 168. Aug 03 16:38:31 localhost systemd[1]: systemd-update-done.service - Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. 
░░ ░░ The job identifier is 185. Aug 03 16:38:31 localhost kernel: RPC: Registered named UNIX socket transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered udp transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp-with-tls transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Aug 03 16:38:31 localhost systemd[1]: Mounted var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 262. Aug 03 16:38:31 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 261. Aug 03 16:38:31 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-machine-id-commit.service - Save Transient machine-id to Disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 168. Aug 03 16:38:32 localhost augenrules[521]: /sbin/augenrules: No change Aug 03 16:38:32 localhost augenrules[537]: No rules Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost systemd[1]: audit-rules.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit audit-rules.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Finished audit-rules.service - Load Audit Rules. ░░ Subject: A start job for unit audit-rules.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has finished successfully. ░░ ░░ The job identifier is 231. Aug 03 16:38:32 localhost systemd[1]: Starting auditd.service - Security Audit Logging Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 230. Aug 03 16:38:32 localhost systemd[1]: Started systemd-udevd.service - Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 141. Aug 03 16:38:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 288. Aug 03 16:38:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 288. Aug 03 16:38:32 localhost systemd[1]: Condition check resulted in dev-ttyS0.device - /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 227. Aug 03 16:38:32 localhost 55-scsi-sg3_id.rules[573]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:32 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Aug 03 16:38:32 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Aug 03 16:38:32 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 294. 
Aug 03 16:38:32 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Aug 03 16:38:32 localhost auditd[580]: No plugins found, not dispatching events Aug 03 16:38:32 localhost auditd[580]: Init complete, auditd 4.0 listening for events (startup state enable) Aug 03 16:38:32 localhost systemd[1]: Started auditd.service - Security Audit Logging Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 230. Aug 03 16:38:32 localhost systemd[1]: Starting systemd-update-utmp.service - Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 251. Aug 03 16:38:32 localhost (udev-worker)[555]: Network interface NamePolicy= disabled on kernel command line. Aug 03 16:38:32 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Aug 03 16:38:32 localhost kernel: Console: switching to colour dummy device 80x25 Aug 03 16:38:32 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Aug 03 16:38:32 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Aug 03 16:38:32 localhost kernel: Console: switching to colour frame buffer device 128x48 Aug 03 16:38:32 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Aug 03 16:38:32 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 294 and the job result is done. Aug 03 16:38:32 localhost systemd[1]: run-credentials-systemd\x2dvconsole\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dvconsole\x2dsetup.service.mount has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 294. Aug 03 16:38:32 localhost systemd[1]: Finished systemd-update-utmp.service - Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 251. Aug 03 16:38:33 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup. 
░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has finished successfully. ░░ ░░ The job identifier is 294. Aug 03 16:38:33 localhost systemd[1]: Reached target sysinit.target - System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 120. Aug 03 16:38:33 localhost systemd[1]: Started dnf-makecache.timer - dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 198. Aug 03 16:38:33 localhost systemd[1]: Started fstrim.timer - Discard unused filesystem blocks once a week. ░░ Subject: A start job for unit fstrim.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.timer has finished successfully. ░░ ░░ The job identifier is 206. Aug 03 16:38:33 localhost systemd[1]: Started logrotate.timer - Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 207. Aug 03 16:38:33 localhost systemd[1]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 205. Aug 03 16:38:33 localhost systemd[1]: Started unbound-anchor.timer - daily update of the root trust anchor for DNSSEC. ░░ Subject: A start job for unit unbound-anchor.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.timer has finished successfully. ░░ ░░ The job identifier is 208. Aug 03 16:38:33 localhost systemd[1]: Reached target timers.target - Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 197. Aug 03 16:38:33 localhost systemd[1]: Listening on dbus.socket - D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 202. Aug 03 16:38:33 localhost systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 216. Aug 03 16:38:33 localhost systemd[1]: Listening on sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket. 
░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 219. Aug 03 16:38:33 localhost systemd[1]: Listening on systemd-hostnamed.socket - Hostname Service Socket. ░░ Subject: A start job for unit systemd-hostnamed.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.socket has finished successfully. ░░ ░░ The job identifier is 215. Aug 03 16:38:33 localhost systemd[1]: Reached target sockets.target - Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 210. Aug 03 16:38:33 localhost systemd[1]: Starting dbus-broker.service - D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 203. Aug 03 16:38:33 localhost systemd[1]: systemd-pcrphase-sysinit.service - TPM PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 132. Aug 03 16:38:33 localhost systemd[1]: Started dbus-broker.service - D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 203. Aug 03 16:38:33 localhost systemd[1]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 119. Aug 03 16:38:33 localhost dbus-broker-launch[602]: Ready Aug 03 16:38:34 localhost systemd[1]: Starting chronyd.service - NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 247. Aug 03 16:38:34 localhost systemd[1]: Starting cloud-init-local.service - Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 234. Aug 03 16:38:34 localhost systemd[1]: Starting dracut-shutdown.service - Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 184. 
Aug 03 16:38:34 localhost systemd[1]: Started irqbalance.service - irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 232. Aug 03 16:38:34 localhost (qbalance)[607]: irqbalance.service: Referenced but unset environment variable evaluates to an empty string: IRQBALANCE_ARGS Aug 03 16:38:34 localhost systemd[1]: Started rngd.service - Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 269. Aug 03 16:38:34 localhost systemd[1]: Starting rsyslog.service - System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 268. Aug 03 16:38:34 localhost systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 237. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 241. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 239. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 242. Aug 03 16:38:34 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 238. 
Aug 03 16:38:34 localhost systemd[1]: sssd.service - System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 270. Aug 03 16:38:34 localhost systemd[1]: Reached target nss-user-lookup.target - User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 271. Aug 03 16:38:34 localhost systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 252. Aug 03 16:38:34 localhost systemd-logind[612]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event0 (Power Button) Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event1 (Sleep Button) Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Aug 03 16:38:34 localhost systemd[1]: Starting unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound... ░░ Subject: A start job for unit unbound-anchor.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.service has begun execution. ░░ ░░ The job identifier is 303. Aug 03 16:38:34 localhost systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 252. Aug 03 16:38:34 localhost systemd[1]: Finished dracut-shutdown.service - Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 184. Aug 03 16:38:34 localhost rsyslogd[611]: imjournal: filecreatemode is not set, using default 0644 [v8.2312.0-2.el10 try https://www.rsyslog.com/e/2186 ] Aug 03 16:38:34 localhost rsyslogd[611]: [origin software="rsyslogd" swVersion="8.2312.0-2.el10" x-pid="611" x-info="https://www.rsyslog.com"] start Aug 03 16:38:34 localhost systemd[1]: Started rsyslog.service - System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 268. Aug 03 16:38:34 localhost rsyslogd[611]: imjournal: journal files changed, reloading... 
[v8.2312.0-2.el10 try https://www.rsyslog.com/e/0 ] Aug 03 16:38:34 localhost rngd[610]: Disabling 7: PKCS11 Entropy generator (pkcs11) Aug 03 16:38:34 localhost rngd[610]: Disabling 5: NIST Network Entropy Beacon (nist) Aug 03 16:38:34 localhost rngd[610]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Aug 03 16:38:34 localhost rngd[610]: Disabling 10: Named pipe entropy input (namedpipe) Aug 03 16:38:34 localhost rngd[610]: Initializing available sources Aug 03 16:38:34 localhost rngd[610]: [hwrng ]: Initialization Failed Aug 03 16:38:34 localhost rngd[610]: [rdrand]: Enabling RDRAND rng support Aug 03 16:38:34 localhost rngd[610]: [rdrand]: Initialized Aug 03 16:38:34 localhost rngd[610]: [jitter]: JITTER timeout set to 5 sec Aug 03 16:38:34 localhost rngd[610]: [jitter]: Initializing AES buffer Aug 03 16:38:34 localhost chronyd[631]: chronyd version 4.5 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Aug 03 16:38:34 localhost chronyd[631]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Aug 03 16:38:34 localhost systemd[1]: unbound-anchor.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit unbound-anchor.service has successfully entered the 'dead' state. Aug 03 16:38:34 localhost systemd[1]: Finished unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound. ░░ Subject: A start job for unit unbound-anchor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.service has finished successfully. ░░ ░░ The job identifier is 303. Aug 03 16:38:35 localhost chronyd[631]: Using right/UTC timezone to obtain leap second data Aug 03 16:38:35 localhost chronyd[631]: Loaded seccomp filter (level 2) Aug 03 16:38:35 localhost systemd[1]: Started chronyd.service - NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 247. Aug 03 16:38:39 localhost rngd[610]: [jitter]: Unable to obtain AES key, disabling JITTER source Aug 03 16:38:39 localhost rngd[610]: [jitter]: Initialization Failed Aug 03 16:38:39 localhost rngd[610]: Process privileges have been dropped to 2:2 Aug 03 16:38:41 localhost cloud-init[637]: Cloud-init v. 24.1.4-14.el10 running 'init-local' at Sat, 03 Aug 2024 16:38:41 +0000. Up 28.83 seconds. Aug 03 16:38:42 localhost dhcpcd[639]: dhcpcd-10.0.6 starting Aug 03 16:38:42 localhost kernel: 8021q: 802.1Q VLAN Support v1.8 Aug 03 16:38:42 localhost systemd[1]: Listening on systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 382. 
Aug 03 16:38:42 localhost kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database Aug 03 16:38:42 localhost kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7' Aug 03 16:38:42 localhost kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600' Aug 03 16:38:42 localhost dhcpcd[642]: DUID 00:01:00:01:2e:41:19:92:0a:ff:ca:de:37:19 Aug 03 16:38:42 localhost dhcpcd[642]: eth0: IAID ca:de:37:19 Aug 03 16:38:43 localhost kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2 Aug 03 16:38:43 localhost kernel: cfg80211: failed to load regulatory.db Aug 03 16:38:43 localhost dhcpcd[642]: eth0: soliciting a DHCP lease Aug 03 16:38:44 localhost dhcpcd[642]: eth0: offered 10.31.15.127 from 10.31.12.1 Aug 03 16:38:44 localhost dhcpcd[642]: eth0: leased 10.31.15.127 for 3600 seconds Aug 03 16:38:44 localhost dhcpcd[642]: eth0: adding route to 10.31.12.0/22 Aug 03 16:38:44 localhost dhcpcd[642]: eth0: adding default route via 10.31.12.1 Aug 03 16:38:44 localhost dhcpcd[642]: control command: /usr/sbin/dhcpcd --dumplease --ipv4only eth0 Aug 03 16:38:44 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 391. Aug 03 16:38:44 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 391. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-hostnamed[661]: Hostname set to (static) Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 234. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network-pre.target - Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 173. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager.service - Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 201. 
Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 0 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 0 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 48 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 48 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 49 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 49 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 50 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 50 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 51 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 51 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 52 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 52 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 53 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 53 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 54 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 54 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 55 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 55 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 56 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 56 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 57 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 57 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 58 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 58 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 59 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 59 affinity is now unmanaged Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.2148] NetworkManager (version 1.48.4-1.el10.1) is starting... 
(boot:f918ba53-6683-4114-ab94-acdfb1690bf8) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.2150] Read config: /etc/NetworkManager/NetworkManager.conf (etc: 30-cloud-init-ip6-addr-gen-mode.conf) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3033] manager[0x55b947d06a10]: monitoring kernel firmware directory '/lib/firmware'. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3069] hostname: hostname: using hostnamed Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3069] hostname: static hostname changed from (none) to "ip-10-31-15-127.us-east-1.aws.redhat.com" Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3073] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3077] manager[0x55b947d06a10]: rfkill: Wi-Fi hardware radio set enabled Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3077] manager[0x55b947d06a10]: rfkill: WWAN hardware radio set enabled Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3111] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3112] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3112] manager: Networking is enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3131] settings: Loaded settings plugin: keyfile (internal) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3189] dhcp: init: Using DHCP client 'internal' Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3194] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 468. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3219] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3231] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3237] device (lo): Activation: starting connection 'lo' (b29afc40-5a84-42e4-9b3d-3192ff420e5d) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3246] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3250] device (eth0): state change: unmanaged -> unavailable (reason 'managed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager.service - Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 201. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3282] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network.target - Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 204. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3287] device (lo): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3289] device (lo): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3291] device (lo): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3292] device (eth0): carrier: link connected Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3294] device (lo): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3298] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3302] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3318] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3321] device (eth0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3324] manager: NetworkManager state is now CONNECTING Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3329] device (eth0): state change: prepare -> config (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3337] device (eth0): state change: config -> ip-config (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3342] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3356] dhcp4 (eth0): state changed new lease, address=10.31.15.127, acd pending Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 200. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 264. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.4614] dhcp4 (eth0): state changed new lease, address=10.31.15.127 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.4619] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.5524] device (eth0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 264. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 260. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target nfs-client.target - NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 257. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 265. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes. ░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-cryptsetup.target has finished successfully. ░░ ░░ The job identifier is 246. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs.target - Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 256. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 186. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 468. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7986] device (lo): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7991] device (lo): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7995] device (lo): Activation: successful, device activated. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8001] device (eth0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8002] device (eth0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8005] manager: NetworkManager state is now CONNECTED_SITE Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8007] device (eth0): Activation: successful, device activated. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8012] manager: NetworkManager state is now CONNECTED_GLOBAL Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8021] manager: startup complete Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 200. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 235. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.11.160.238 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.18.100.10 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.2.32.37 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.2.32.38 Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Cloud-init v. 24.1.4-14.el10 running 'init' at Sat, 03 Aug 2024 16:38:46 +0000. Up 33.27 seconds. 
Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | eth0 | True | 10.31.15.127 | 255.255.252.0 | global | 0a:ff:ca:de:37:19 | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | eth0 | True | fe80::8ff:caff:fede:3719/64 | . | link | 0a:ff:ca:de:37:19 | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | lo | True | ::1/128 | . | host | . | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 0 | 0.0.0.0 | 10.31.12.1 | 0.0.0.0 | eth0 | UG | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 1 | 10.31.12.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Route | Destination | Gateway | Interface | Flags | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 0 | fe80::/64 | :: | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 2 | multicast | :: | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: 2024-08-03 16:38:46,245 - handlers[WARNING]: Unhandled non-multipart (text/x-not-multipart) userdata: 
'b'~/.citool.d/post-install'...' Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[819]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[822]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private rsa key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:ZvesGiprDsEtUfJUwK3IbWOK393/5+Op2ojKDN75/FU root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +---[RSA 3072]----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..++. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | =. . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..o.. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .oo* | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .++.. S . E | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: |. .o o . o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..... .. + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .o+=.+.o = o. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o+oBo=+*o+=+. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private ecdsa key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:jE4mUbXoU7JTJPc3vhFSSUGyU5Fge2eMsO0gNuq8XGQ root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +---[ECDSA 256]---+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o.+ =+B= | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . = + Xoo | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . o B O B + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o X o O = | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . 
X E + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | B = o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | + . . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private ed25519 key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:MtJQ4JxB9EQd79wj6M4U+A+GFz9cH4BXaq8JsHFvXD4 root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +--[ED25519 256]--+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o=o+... . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o * .. . o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | = . o + = . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o . O * = | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . = S = B E | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . * = = = o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . B + o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | * o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 235. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 244. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 199. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config... 
░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 243. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 272. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 266. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 258. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 236. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com (sshd)[848]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sm-notify[838]: Version 2.6.4 starting Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 258. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 266. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Server listening on 0.0.0.0 port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Server listening on :: port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 236. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[867]: Cloud-init v. 24.1.4-14.el10 running 'modules:config' at Sat, 03 Aug 2024 16:38:48 +0000. Up 35.58 seconds. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Received signal 15; terminating. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 571 and the job result is done. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 654 and the job result is done. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 654. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 648. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 652. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 650. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 653. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 654. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com (sshd)[874]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[874]: Server listening on 0.0.0.0 port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[874]: Server listening on :: port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 243. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 245. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 267. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 267. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 229. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 222. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 226. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 221. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 118. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 250. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) STARTUP (1.7.0) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (Syslog will be used instead of sendmail.) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 7% if used.) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (running with inotify support) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 250. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[898]: Cloud-init v. 24.1.4-14.el10 running 'modules:final' at Sat, 03 Aug 2024 16:38:48 +0000. Up 36.06 seconds. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[900]: ############################################################# Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[901]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[903]: 256 SHA256:jE4mUbXoU7JTJPc3vhFSSUGyU5Fge2eMsO0gNuq8XGQ root@ip-10-31-15-127.us-east-1.aws.redhat.com (ECDSA) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[905]: 256 SHA256:MtJQ4JxB9EQd79wj6M4U+A+GFz9cH4BXaq8JsHFvXD4 root@ip-10-31-15-127.us-east-1.aws.redhat.com (ED25519) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[907]: 3072 SHA256:ZvesGiprDsEtUfJUwK3IbWOK393/5+Op2ojKDN75/FU root@ip-10-31-15-127.us-east-1.aws.redhat.com (RSA) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[908]: -----END SSH HOST KEY FINGERPRINTS----- Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[909]: ############################################################# Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com restraintd[852]: Listening on http://localhost:8081 Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[898]: Cloud-init v. 24.1.4-14.el10 finished at Sat, 03 Aug 2024 16:38:49 +0000. Datasource DataSourceEc2Local. Up 36.17 seconds Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 245. Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 233. Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Detected change(s) in the following file(s): /etc/fstab Aug 03 16:38:52 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Selected source 10.2.32.38 Aug 03 16:38:52 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: System clock TAI offset set to 37 seconds Aug 03 16:38:54 ip-10-31-15-127.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Aug 03 16:38:54 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Rebuilding /boot/initramfs-6.10.0-15.el10.x86_64kdump.img Aug 03 16:38:55 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1384]: dracut-101-2.el10 Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/8605af15-4596-4ba0-84aa-95550e824316 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.10.0-15.el10.x86_64kdump.img 6.10.0-15.el10.x86_64 Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'plymouth' will not be installed, because it's in the list to be omitted! 
Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'resume' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand' could not be found! 
Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: systemd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: systemd-initrd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: rngd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: i18n *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: drm *** Aug 03 16:38:59 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: prefixdevname *** Aug 03 16:38:59 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kernel-modules *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kernel-modules-extra *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: fstab-sys *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: rootfs-block *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: 
terminfo *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: udev-rules *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: dracut-systemd *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: usrmount *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: base *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: fs-lib *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kdumpbase *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: memstrack *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: microcode_ctl-fw_dir_override *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl module: mangling fw_dir Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: intel: caveats check for kernel version "6.10.0-15.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: configuration "intel-06-4f-01" is ignored Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: shutdown *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: squash *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including modules done *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Installing kernel module dependencies *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Installing kernel module dependencies done *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Resolving executable dependencies *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Resolving executable dependencies done *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Hardlinking files *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Mode: real Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Method: sha256 Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Files: 440 Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Linked: 1 files Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Compared: 0 xattrs Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Compared: 11 files Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Saved: 60.55 KiB Aug 03 16:39:03 
ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Duration: 0.008510 seconds Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Hardlinking files done *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Generating early-microcode cpio image *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Constructing GenuineIntel.bin *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Constructing GenuineIntel.bin *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Store current command line parameters *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Stored kernel commandline: Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: No dracut internal kernel commandline stored in the initramfs Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Install squash loader *** Aug 03 16:39:04 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Stripping files *** Aug 03 16:39:05 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Stripping files done *** Aug 03 16:39:05 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Squashing the files inside the initramfs *** Aug 03 16:39:10 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Squashing the files inside the initramfs done *** Aug 03 16:39:10 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Creating image file '/boot/initramfs-6.10.0-15.el10.x86_64kdump.img' *** Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Creating initramfs image file '/boot/initramfs-6.10.0-15.el10.x86_64kdump.img' done *** Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: kexec: loaded kdump kernel Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Starting kdump: [OK] Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 272. Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Startup finished in 990ms (kernel) + 6.157s (initrd) + 51.405s (userspace) = 58.552s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 990036 microseconds. ░░ ░░ Initrd start-up required 6157085 microseconds. ░░ ░░ Userspace start-up required 51405179 microseconds. Aug 03 16:39:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. 
Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: Accepted publickey for root from 10.30.34.46 port 48996 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3733) opened. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 741. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 664. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 3733. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 664. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 743. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 2 of user root. ░░ Subject: A new session 2 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 2 has been created for the user root. ░░ ░░ The leading process of the session is 3738. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com (systemd)[3738]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Queued start job for default target default.target. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 6. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target default.target - Main User Target. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 743. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Startup finished in 194ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 194709 microseconds. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 823. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3749]: Received disconnect from 10.30.34.46 port 48996:11: disconnected by user Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3749]: Disconnected from user root 10.30.34.46 port 48996 Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3733) opened. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 1 logged out. Waiting for processes to exit. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3781]: Connection closed by 10.29.163.172 port 59608 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: Accepted publickey for root from 10.29.163.172 port 59620 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3782) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 3 of user root. 
░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 3782. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 904. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3785]: Received disconnect from 10.29.163.172 port 59620:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3785]: Disconnected from user root 10.29.163.172 port 59620 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3782) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 3 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: Accepted publickey for root from 10.29.163.172 port 59632 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3804) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 3804. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 985. 
Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3807]: Received disconnect from 10.29.163.172 port 59632:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3807]: Disconnected from user root 10.29.163.172 port 59632 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3804) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 4 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: Accepted publickey for root from 10.29.163.172 port 59638 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3826) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 3826. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-5.scope - Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1066. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3829]: Received disconnect from 10.29.163.172 port 59638:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3829]: Disconnected from user root 10.29.163.172 port 59638 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3826) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. 
Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 5 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 5. ░░ Subject: Session 5 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 5 has been terminated. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: Accepted publickey for root from 10.29.163.172 port 59642 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3848) opened. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 6 of user root. ░░ Subject: A new session 6 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user root. ░░ ░░ The leading process of the session is 3848. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-6.scope - Session 6 of User root. ░░ Subject: A start job for unit session-6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-6.scope has finished successfully. ░░ ░░ The job identifier is 1147. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[3940]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:40:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[3985]: ansible-service_facts Invoked Aug 03 16:40:33 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4103]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:34 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4124]: ansible-stat Invoked with path=/etc/yum.repos.d/qa-tools.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4142]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4161]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False 
update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4180]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:36 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4200]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:37 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4228]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:39 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4256]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:39 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4276]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4296]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4314]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Linux-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4332]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Stream-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4350]: ansible-file Invoked with path=/etc/yum.repos.d/beaker-client-testing.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None 
modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:41 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4368]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:42 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4388]: ansible-replace Invoked with path=/etc/yum.repos.d/beaker-client.repo regexp=7|\$releasever replace=8 backup=False encoding=utf-8 unsafe_writes=False after=None before=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:42 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4406]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4426]: ansible-community.general.ini_file Invoked with path=/etc/yum.repos.d/beaker-client.repo section=beaker-client option=skip_if_unavailable value=1 backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False values=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4444]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4470]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4489]: ansible-stat Invoked with path=/etc/yum.repos.d/baseos-ci.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:44 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4507]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:44 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4533]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4552]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None 
removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4578]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4597]: ansible-ansible.legacy.command Invoked with _raw_params=yum repolist --enablerepo '*' | grep -q rhel-buildroot _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:46 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4618]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:46 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4644]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4663]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4689]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4708]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:48 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4727]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:58 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4803]: ansible-ansible.legacy.dnf Invoked with name=['createrepo'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:01 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service - /usr/bin/systemctl start man-db-cache-update. 
░░ Subject: A start job for unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has finished successfully. ░░ ░░ The job identifier is 1228. Aug 03 16:41:01 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1305. Aug 03 16:41:02 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4830]: ansible-ansible.legacy.dnf Invoked with name=['make'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:02 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4849]: ansible-ansible.legacy.dnf Invoked with name=['parted'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4868]: ansible-ansible.legacy.dnf Invoked with name=['psmisc'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4887]: ansible-ansible.legacy.dnf Invoked with name=['chrony'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. 
Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1305. Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has successfully entered the 'dead' state. Aug 03 16:41:04 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4910]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:08 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4929]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources | grep redhat | grep -v util.phx2.redhat || chronyc add server clock.corp.redhat.com iburst" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:19 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4951]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4970]: ansible-ansible.legacy.command Invoked with _raw_params=chronyc waitsync 5 _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4989]: ansible-ansible.legacy.command Invoked with _raw_params=setenforce 0; timedatectl set-timezone UTC; setenforce 1 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-timedated.service - Time & Date Service... ░░ Subject: A start job for unit systemd-timedated.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has begun execution. ░░ ░░ The job identifier is 1382. Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started systemd-timedated.service - Time & Date Service. ░░ Subject: A start job for unit systemd-timedated.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has finished successfully. ░░ ░░ The job identifier is 1382. Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-timedated[4993]: Changed time zone to 'UTC' (UTC). ░░ Subject: Time zone change to UTC ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system timezone has been changed to UTC. 
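(Editor's aside, not part of the captured run: the entries above record the time-sync preparation, where `chronyc sources` is queried, `clock.corp.redhat.com` is conditionally added, and `chronyc waitsync 5` blocks until the clock is synchronised before the timezone is switched to UTC. A minimal Python sketch of that wait-and-verify step, reusing only the chronyc commands seen in the log; the function name and error handling are illustrative assumptions.)

```python
# Illustrative sketch only: re-run the chronyc commands captured in the journal
# above and fail fast if the clock has not synchronised within the allowed tries.
import subprocess
import sys


def wait_for_time_sync(tries: int = 5) -> None:
    # "chronyc waitsync 5" (as recorded above) polls periodically and exits
    # non-zero if synchronisation is not reached.
    result = subprocess.run(["chronyc", "waitsync", str(tries)],
                            capture_output=True, text=True)
    if result.returncode != 0:
        sys.exit(f"clock not synchronised: {result.stdout or result.stderr}")
    # Print the current source list, mirroring the "chronyc sources" checks.
    print(subprocess.run(["chronyc", "sources"],
                         capture_output=True, text=True).stdout)


if __name__ == "__main__":
    wait_for_time_sync()
```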
Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5011]: ansible-yum_repository Invoked with state=present name=beaker-tasks description=Beaker tasks baseurl=['http://beaker.engineering.redhat.com/rpms/'] gpgcheck=False enabled=True reposdir=/etc/yum.repos.d unsafe_writes=False bandwidth=None cost=None deltarpm_metadata_percentage=None deltarpm_percentage=None enablegroups=None exclude=None failovermethod=None file=None gpgcakey=None gpgkey=None module_hotfixes=None http_caching=None include=None includepkgs=None ip_resolve=None keepalive=None keepcache=None metadata_expire=None metadata_expire_filter=None metalink=None mirrorlist=None mirrorlist_expire=None params=None password=NOT_LOGGING_PARAMETER priority=None protect=None proxy=None proxy_password=NOT_LOGGING_PARAMETER proxy_username=None repo_gpgcheck=None retries=None s3_enabled=None skip_if_unavailable=None sslcacert=None ssl_check_cert_permissions=None sslclientcert=None sslclientkey=None sslverify=None throttle=None timeout=None ui_repoid_vars=None username=None async=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5029]: ansible-user Invoked with name=root update_password=always password=NOT_LOGGING_PARAMETER state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on ip-10-31-15-127.us-east-1.aws.redhat.com uid=None group=None groups=None comment=None home=None shell=None login_class=None password_expire_max=None password_expire_min=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com usermod[5032]: change user 'root' password Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5054]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config regexp=#?PasswordAuthentication (?:yes|no) line=PasswordAuthentication yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5072]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config line=PermitRootLogin yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5090]: ansible-file Invoked with path=/var/lib/tft state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5108]: ansible-stat Invoked with path=/var/lib/tft/lib.sh follow=False get_md5=False get_checksum=True 
get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5166]: ansible-ansible.legacy.stat Invoked with path=/var/lib/tft/lib.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5219]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703288.7922935-9143-9606793578585/source dest=/var/lib/tft/lib.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=lib.sh.j2 checksum=af779f2a149bc6695c9d2d1622342e81371886ab backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5255]: ansible-stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com rsyslogd[611]: imjournal: journal files changed, reloading... [v8.2312.0-2.el10 try https://www.rsyslog.com/e/0 ] Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5314]: ansible-ansible.legacy.stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5367]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703289.6370099-9156-125064816134732/source dest=/usr/local/bin/ci-extendtesttime.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-extendtesttime.sh.j2 checksum=2d0f6c296873c17e8b22c9490b000973b2a8a350 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5403]: ansible-stat Invoked with path=/usr/bin/extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5421]: ansible-stat Invoked with path=/usr/local/bin/ci-return2pool.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5479]: ansible-ansible.legacy.stat Invoked with path=/usr/local/bin/ci-return2pool.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5532]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703290.6154213-9176-210523413906836/source dest=/usr/local/bin/ci-return2pool.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-return2pool.sh.j2 checksum=f77cae3b2a729c60bcb0c01c82bf347f13a9b2c5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5568]: ansible-stat Invoked with path=/etc/motd follow=False get_md5=False 
get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5628]: ansible-ansible.legacy.stat Invoked with path=/root/.guest-metadata.json follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5681]: ansible-ansible.legacy.copy Invoked with dest=/root/.guest-metadata.json owner=root group=root mode=u=rw,go= src=/root/.ansible/tmp/ansible-tmp-1722703291.3756824-9202-125095011241297/source _original_basename=tmpmial_d_c follow=False checksum=bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5717]: ansible-ansible.legacy.command Invoked with _raw_params=curl -sLI --connect-timeout 5 -w '%{response_code}' http://169.254.169.254/latest/meta-data/instance-id | grep ^200 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5738]: ansible-hostname Invoked with name=721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm use=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 1459. Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 1459. Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-hostnamed[5742]: Changed pretty hostname to '721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm' Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd-hostnamed[5742]: Hostname set to <721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm> (static) Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm NetworkManager[668]: [1722703292.6855] hostname: static hostname changed from "ip-10-31-15-127.us-east-1.aws.redhat.com" to "721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm" Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1536. Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1536. Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5772]: ansible-ansible.legacy.command Invoked with _raw_params=ping -4 -c1 -W10 $(hostname) _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5792]: ansible-file Invoked with path=/var/log/messages state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5810]: ansible-ansible.legacy.command Invoked with _raw_params=setsebool nis_enabled on _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[5811]: The nis_enabled policy boolean was changed to on by root Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5829]: ansible-stat Invoked with path=/usr/bin/rstrnt-package follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5849]: ansible-ansible.legacy.command Invoked with _raw_params=sed -e 's/rpm -q $package/rpm -q --whatprovides $package/' -i /usr/bin/rstrnt-package _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5868]: ansible-stat Invoked with path=/var/lib/restraint follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5888]: ansible-file Invoked with path=/var/lib/restraint/avc_since state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5906]: ansible-stat Invoked with path=/usr/share/beakerlib/beakerlib.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5926]: ansible-file Invoked with dest=/usr/lib/beakerlib state=directory path=/usr/lib/beakerlib recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 
721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5944]: ansible-file Invoked with dest=/usr/share/rhts-library state=directory path=/usr/share/rhts-library recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5962]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/lib/beakerlib/beakerlib.sh state=link path=/usr/lib/beakerlib/beakerlib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5980]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/share/rhts-library/rhtslib.sh state=link path=/usr/share/rhts-library/rhtslib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5998]: ansible-ansible.legacy.command Invoked with _raw_params=mv /var/log/audit/audit.log /var/log/audit/audit.log.bak _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6017]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/sbin/service auditd restart _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[580]: The audit daemon is exiting. Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703296.152:732): op=set audit_pid=0 old=580 auid=4294967295 ses=4294967295 subj=system_u:system_r:auditd_t:s0 res=1 Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: auditd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit auditd.service has successfully entered the 'dead' state. Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1131 audit(1722703296.156:733): pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=auditd comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success' Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=2310 audit(1722703296.156:734): pid=603 uid=81 auid=4294967295 ses=4294967295 subj=system_u:system_r:system_dbusd_t:s0-s0:c0.c1023 msg='avc: op=load_policy lsm=selinux seqno=2 res=1 exe="/usr/bin/dbus-broker" sauid=81 hostname=? addr=? terminal=?' 
Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting audit-rules.service - Load Audit Rules... ░░ Subject: A start job for unit audit-rules.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has begun execution. ░░ ░░ The job identifier is 1615. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6027]: /sbin/augenrules: No change Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:735): op=set audit_backlog_limit=8192 old=8192 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: No rules Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 3 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 7 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 11 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: 
type=1300 audit(1722703297.189:735): arch=c000003e syscall=44 success=yes exit=60 a0=3 a1=7ffc90be7840 a2=3c a3=0 items=0 ppid=6027 pid=6037 auid=4294967295 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=(none) ses=4294967295 comm="auditctl" exe="/usr/sbin/auditctl" subj=system_u:system_r:unconfined_service_t:s0 key=(null) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1327 audit(1722703297.189:735): proctitle=2F7362696E2F617564697463746C002D52002F6574632F61756469742F61756469742E72756C6573 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:736): op=set audit_failure=1 old=1 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1300 audit(1722703297.189:736): arch=c000003e syscall=44 success=yes exit=60 a0=3 a1=7ffc90be7840 a2=3c a3=0 items=0 ppid=6027 pid=6037 auid=4294967295 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=(none) ses=4294967295 comm="auditctl" exe="/usr/sbin/auditctl" subj=system_u:system_r:unconfined_service_t:s0 key=(null) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1327 audit(1722703297.189:736): proctitle=2F7362696E2F617564697463746C002D52002F6574632F61756469742F61756469742E72756C6573 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:737): op=set audit_backlog_wait_time=60000 old=60000 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: audit-rules.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit audit-rules.service has successfully entered the 'dead' state. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished audit-rules.service - Load Audit Rules. ░░ Subject: A start job for unit audit-rules.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has finished successfully. ░░ ░░ The job identifier is 1615. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting auditd.service - Security Audit Logging Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 1614. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[6046]: No plugins found, not dispatching events Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[6046]: Init complete, auditd 4.0 listening for events (startup state enable) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started auditd.service - Security Audit Logging Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 1614. 
Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6067]: ansible-stat Invoked with path=/etc/NetworkManager/conf.d follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6127]: ansible-ansible.legacy.stat Invoked with path=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6180]: ansible-ansible.legacy.copy Invoked with dest=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf src=/root/.ansible/tmp/ansible-tmp-1722703297.5009277-9344-116220217129658/source _original_basename=tmpa81l80ed follow=False checksum=0145f3ae57eef5aa08bbb678fedbb3edd001cd2d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:38 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6216]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:38 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6242]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6261]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6280]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Aug 03 16:41:50 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6394]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:41:51 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6439]: ansible-service_facts Invoked Aug 03 16:41:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6556]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6577]: ansible-ansible.legacy.command Invoked with _raw_params=if [ $(find /etc/yum.repos.d/ -name 'epel.repo' | wc -l ) -gt 0 ]; then dnf config-manager --set-disabled epel; fi _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6599]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6618]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: systemd-timedated.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-timedated.service has successfully entered the 'dead' state. Aug 03 16:42:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. 
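(Editor's aside: several entries above and below capture full package snapshots with `rpm -qa | sort` before and after the repository changes. Nothing of the sort runs in this log, but two such snapshots could be diffed with a few lines of Python; the file names are placeholders.)

```python
# Hypothetical helper: compare two "rpm -qa | sort" snapshots like the ones
# collected repeatedly in the journal above. Paths are placeholders.
import sys


def load(path: str) -> set[str]:
    with open(path, encoding="utf-8") as fh:
        return {line.strip() for line in fh if line.strip()}


def main(before_path: str, after_path: str) -> None:
    before, after = load(before_path), load(after_path)
    for pkg in sorted(after - before):
        print(f"+ {pkg}")  # present only in the later snapshot
    for pkg in sorted(before - after):
        print(f"- {pkg}")  # present only in the earlier snapshot


if __name__ == "__main__":
    main(sys.argv[1], sys.argv[2])
```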
Aug 03 16:42:06 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6737]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6782]: ansible-service_facts Invoked Aug 03 16:42:09 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6900]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:10 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6958]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7003]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:42:12 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7022]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:42:12 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7041]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:13 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7061]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:14 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7089]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:15 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7117]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:15 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7137]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v 
https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:16 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7157]: ansible-service_facts Invoked Aug 03 16:42:18 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7275]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:18 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7296]: ansible-service_facts Invoked Aug 03 16:42:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7414]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7435]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:21 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7461]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:21 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7480]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:22 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7499]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7612]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7659]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:42:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7677]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:42:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7700]: 
ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. 
Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7708 ('systemctl') (unit session-6.scope)... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 169 ms. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started run-rc4a75727ad5043baaa3a7236ce1063c8.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has finished successfully. ░░ ░░ The job identifier is 1625. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1702. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7761 ('systemctl') (unit session-6.scope)... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 205 ms. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Queuing reload/restart jobs for marked units… Aug 03 16:42:40 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7840]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:40 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7858]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7876]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7879 ('systemctl') (unit session-6.scope)... Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 180 ms. Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... 
░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 1779. Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm (rtmonger)[7927]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 1779. Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7969]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 
721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7987]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
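(Editor's aside: at this point certmonger has saved the self-signed certificate requested for dns=['localhost'] to /etc/pki/tls/certs/quadlet_demo.crt; the test untracks and deletes it again a few entries later. As an illustrative check that the test itself does not perform, the issued file's subject and SANs could be inspected with python3-cryptography, which the log shows being installed earlier. Only the path comes from the log.)

```python
# Sketch only: inspect a certificate like the one certmonger reports saving
# above. The path matches the log entry; the check itself is hypothetical.
from cryptography import x509

CERT_PATH = "/etc/pki/tls/certs/quadlet_demo.crt"

with open(CERT_PATH, "rb") as fh:
    cert = x509.load_pem_x509_certificate(fh.read())

san = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
print("subject:", cert.subject.rfc4514_string())
print("issuer: ", cert.issuer.rfc4514_string())
print("DNS SANs:", san.get_values_for_type(x509.DNSName))
print("not valid after:", cert.not_valid_after)
```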
Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8005]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8023]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8041]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8059]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:43 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8078]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8096]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8114]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1702. 
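(Editor's aside: the entries above show the certificate cleanup, first `getcert stop-tracking -f ...`, then the crt and key files being set absent. A compact Python rendering of that same sequence, with the commands and paths taken from the log and the wrapper itself purely illustrative.)

```python
# Sketch of the cleanup recorded above: stop certmonger tracking, then remove
# the test certificate and key. Commands and paths are as seen in the journal.
import os
import subprocess

CERT = "/etc/pki/tls/certs/quadlet_demo.crt"
KEY = "/etc/pki/tls/private/quadlet_demo.key"

# Equivalent of the "getcert stop-tracking -f <cert>" command in the log.
subprocess.run(["getcert", "stop-tracking", "-f", CERT], check=False)

for path in (CERT, KEY):
    try:
        os.remove(path)
    except FileNotFoundError:
        pass  # already absent, mirroring the idempotent state=absent tasks
```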
Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8132]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: run-rc4a75727ad5043baaa3a7236ce1063c8.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has successfully entered the 'dead' state. Aug 03 16:42:45 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8172]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... 
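The ansible.legacy.dnf call logged above installs the podman stack (iptables-nft, podman, shadow-utils-subid) before the role continues. A hedged standalone equivalent, written against the builtin dnf module rather than the legacy alias used internally by the role, is:

- name: Ensure podman and its supporting packages are present
  ansible.builtin.dnf:
    name:
      - iptables-nft
      - podman
      - shadow-utils-subid
    state: present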
Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[8230]: The virt_use_nfs policy boolean was changed to 1 by root Aug 03 16:43:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[8230]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 409 SID table entries... 
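The setsebool entries above show virt_use_nfs and virt_sandbox_use_all_caps being switched on while the container packages are set up; the log does not show which component ran setsebool. If the same booleans were managed directly from a playbook, a sketch using ansible.posix.seboolean (assuming the ansible.posix collection is available) could look like:

- name: Enable the SELinux booleans toggled in the journal above
  ansible.posix.seboolean:
    name: "{{ item }}"
    state: true
    persistent: true
  loop:
    - virt_use_nfs
    - virt_sandbox_use_all_caps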
Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 409 SID table entries... Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has finished successfully. ░░ ░░ The job identifier is 1857. Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1934. Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 9065 ('systemctl') (unit session-6.scope)... Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... 
Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 276 ms. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Queuing reload/restart jobs for marked units… Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Ready Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reload requested from client PID 9125 ('systemctl')... Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reloading... Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reloading finished in 63 ms. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Queuing reload/restart jobs for marked units… Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1934. Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has successfully entered the 'dead' state. 
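The journal entries that follow show firewalld being installed via dnf and then enabled and started through the systemd module before any ports are opened. A minimal hedged equivalent of those two steps (module names are the current builtin spellings, not necessarily the exact aliases the role invokes):

- name: Install firewalld
  ansible.builtin.dnf:
    name: firewalld
    state: present

- name: Enable and start firewalld
  ansible.builtin.systemd_service:
    name: firewalld
    state: started
    enabled: true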
Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9435]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9460]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9479]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9498]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:57 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9518]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:58 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9536]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:58 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9554]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9573]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9593]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 9596 ('systemctl') (unit session-6.scope)... Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 178 ms. Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 2012. Aug 03 16:44:00 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. 
░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 2012. Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9678]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 2095. Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Started polkitd version 124 Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Loading rules from directory /etc/polkit-1/rules.d Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Loading rules from directory /usr/share/polkit-1/rules.d Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Finished loading, compiling and executing 2 rules Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 2095. 
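The fedora.linux_system_roles.firewall_lib invocation above (port=['8000/tcp'], permanent=True, runtime=True, state=enabled, with 9000/tcp following) is what the firewall role generates from its port list. As a sketch only, the corresponding role-level configuration would be along these lines; the variable layout follows the firewall role's documented interface, and anything beyond the two ports and their state should be treated as an assumption:

- name: Open the demo ports with the firewall system role (sketch)
  hosts: all
  vars:
    firewall:
      # each list entry turns into one firewall_lib call like those logged above
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled
  roles:
    - fedora.linux_system_roles.firewall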
Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Acquired the name org.freedesktop.PolicyKit1 on the system bus Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9735]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9753]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:136 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.241) 0:01:28.867 ******* ok: [sut] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.092179", "end": "2024-08-03 16:44:02.267646", "rc": 0, "start": "2024-08-03 
16:44:02.175467" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check pods] ************************************************************** task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:140 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.287) 0:01:29.154 ******* ok: [sut] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.028203", "end": "2024-08-03 16:44:02.487555", "failed_when_result": false, "rc": 0, "start": "2024-08-03 16:44:02.459352" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:145 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.220) 0:01:29.375 ******* ok: [sut] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.013242", "end": "2024-08-03 16:44:02.699171", "failed_when_result": false, "rc": 1, "start": "2024-08-03 16:44:02.685929" } MSG: non-zero return code TASK [LS] ********************************************************************** task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:153 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.209) 0:01:29.585 ******* ok: [sut] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004444", "end": "2024-08-03 16:44:02.895322", "failed_when_result": false, "rc": 0, "start": "2024-08-03 16:44:02.890878" } STDOUT: total 12 drwxr-xr-x. 5 root root 47 Aug 1 08:05 ../ lrwxrwxrwx. 1 root root 43 Aug 1 08:05 dbus.service -> /usr/lib/systemd/system/dbus-broker.service drwxr-xr-x. 2 root root 32 Aug 1 08:05 getty.target.wants/ lrwxrwxrwx. 1 root root 37 Aug 1 08:05 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target drwxr-xr-x. 2 root root 48 Aug 1 08:06 network-online.target.wants/ lrwxrwxrwx. 1 root root 57 Aug 1 08:06 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 38 Aug 1 08:06 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 Aug 1 08:08 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 104 Aug 1 08:22 timers.target.wants/ drwxr-xr-x. 2 root root 31 Aug 1 08:22 remote-fs.target.wants/ drwxr-xr-x. 2 root root 119 Aug 1 08:23 cloud-init.target.wants/ drwxr-xr-x. 2 root root 91 Aug 1 08:24 sockets.target.wants/ drwxr-xr-x. 2 root root 4096 Aug 1 08:24 sysinit.target.wants/ drwxr-xr-x. 2 root root 4096 Aug 3 16:43 multi-user.target.wants/ lrwxrwxrwx. 1 root root 41 Aug 3 16:43 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 
11 root root 4096 Aug 3 16:43 ./ TASK [Cleanup] ***************************************************************** task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:160 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.197) 0:01:29.782 ******* included: fedora.linux_system_roles.podman for sut TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 03 August 2024 16:44:02 +0000 (0:00:00.040) 0:01:29.823 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.056) 0:01:29.879 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.022) 0:01:29.902 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.017) 0:01:29.919 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:20 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.017) 0:01:29.937 ******* ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [sut] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ 
"/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.044) 0:01:29.981 ******* ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.549) 0:01:30.530 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.019) 0:01:30.550 ******* skipping: [sut] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:22 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.020) 0:01:30.570 ******* ok: [sut] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025940", "end": "2024-08-03 16:44:03.903210", "rc": 0, "start": "2024-08-03 16:44:03.877270" } STDOUT: podman version 5.1.2 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.220) 0:01:30.791 ******* ok: [sut] => { "ansible_facts": { "podman_version": "5.1.2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:32 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.019) 0:01:30.811 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:39 Saturday 03 August 2024 16:44:03 +0000 (0:00:00.017) 0:01:30.828 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:49 Saturday 03 August 2024 16:44:03 +0000 
(0:00:00.021) 0:01:30.849 ******* META: end_host conditional evaluated to False, continuing execution for sut skipping: [sut] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for sut" } MSG: end_host conditional evaluated to false, continuing execution for sut TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.023) 0:01:30.873 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for sut TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.038) 0:01:30.912 ******* skipping: [sut] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.021) 0:01:30.933 ******* skipping: [sut] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.020) 0:01:30.954 ******* ok: [sut] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get group information] **************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.051) 0:01:31.006 ******* ok: [sut] => { "ansible_facts": { "getent_group": { "root": [ "x", "0", "" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Set group name] *********************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.204) 0:01:31.210 ******* ok: [sut] => { "ansible_facts": { "__podman_group_name": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.025) 0:01:31.235 ******* ok: [sut] => { "changed": false, "stat": { "atime": 1722703436.9751415, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "86395ad7ce62834c967dc50f963a68f042029188", "ctime": 1722703416.657093, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 4660166, "isblk": false, "ischr": false, "isdir": false, 
"isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1719187200.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15728, "uid": 0, "version": "3584316354", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.203) 0:01:31.439 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check group with getsubids] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.017) 0:01:31.457 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.017) 0:01:31.474 ******* skipping: [sut] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.017) 0:01:31.492 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.016) 0:01:31.508 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.016) 0:01:31.525 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.016) 0:01:31.541 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] ***** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.016) 0:01:31.558 ******* skipping: [sut] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:62 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.015) 0:01:31.573 ******* ok: [sut] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:71 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.035) 0:01:31.609 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for sut TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.035) 0:01:31.645 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.019) 0:01:31.664 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:74 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.017) 0:01:31.682 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for sut TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.035) 0:01:31.717 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.018) 0:01:31.736 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:77 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.016) 0:01:31.752 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for sut TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.060) 0:01:31.813 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.018) 0:01:31.832 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 03 August 2024 16:44:04 +0000 (0:00:00.017) 0:01:31.850 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for sut TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.038) 0:01:31.888 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.018) 0:01:31.907 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.018) 0:01:31.926 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.017) 0:01:31.943 ******* skipping: [sut] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:86 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.018) 0:01:31.962 ******* included: fedora.linux_system_roles.firewall for sut TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.063) 0:01:32.025 ******* included: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for sut TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.035) 0:01:32.061 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.024) 0:01:32.085 ******* skipping: [sut] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.017) 0:01:32.102 ******* skipping: [sut] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.017) 0:01:32.120 ******* skipping: [sut] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.017) 0:01:32.138 ******* skipping: [sut] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.017) 0:01:32.155 ******* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.592) 0:01:32.748 ******* skipping: [sut] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.052) 0:01:32.800 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.019) 0:01:32.819 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.019) 0:01:32.839 ******* skipping: [sut] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 03 August 2024 16:44:05 +0000 (0:00:00.016) 0:01:32.855 ******* skipping: [sut] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.021) 0:01:32.877 ******* ok: [sut] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-08-03 16:44:00 UTC", "ActiveEnterTimestampMonotonic": 
"347701858", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target dbus-broker.service polkit.service dbus.socket system.slice basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-08-03 16:43:59 UTC", "AssertTimestampMonotonic": "346812977", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "404147000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ConditionTimestampMonotonic": "346812974", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service ip6tables.service ebtables.service iptables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4998", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3700936704", "EffectiveMemoryMax": "3700936704", "EffectiveTasksMax": "22402", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ExecMainHandoffTimestampMonotonic": "346844424", "ExecMainPID": "9649", "ExecMainStartTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ExecMainStartTimestampMonotonic": "346814964", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2024-08-03 16:43:59 UTC] ; stop_time=[n/a] ; pid=9649 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2024-08-03 16:43:59 UTC] ; stop_time=[n/a] ; pid=9649 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-08-03 16:43:59 UTC", "InactiveExitTimestampMonotonic": "346815440", "InvocationID": "eeb2fa61f8b54f868da3a23f33de5528", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14001", "LimitNPROCSoft": "14001", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14001", "LimitSIGPENDINGSoft": "14001", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "9649", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3202433024", "MemoryCurrent": "34340864", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "36028416", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": 
"replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-08-03 16:44:00 UTC", "StateChangeTimestampMonotonic": "347701858", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22402", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.348) 0:01:33.225 ******* ok: [sut] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 
2024-08-03 16:44:00 UTC", "ActiveEnterTimestampMonotonic": "347701858", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target dbus-broker.service polkit.service dbus.socket system.slice basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-08-03 16:43:59 UTC", "AssertTimestampMonotonic": "346812977", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "404147000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ConditionTimestampMonotonic": "346812974", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service ip6tables.service ebtables.service iptables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4998", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3700936704", "EffectiveMemoryMax": "3700936704", "EffectiveTasksMax": "22402", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ExecMainHandoffTimestampMonotonic": "346844424", "ExecMainPID": "9649", "ExecMainStartTimestamp": "Sat 2024-08-03 16:43:59 UTC", "ExecMainStartTimestampMonotonic": "346814964", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Sat 2024-08-03 16:43:59 UTC] ; stop_time=[n/a] ; pid=9649 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Sat 2024-08-03 16:43:59 UTC] ; stop_time=[n/a] ; pid=9649 ; code=(null) ; status=0/0 }", "ExitType": "main", 
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-08-03 16:43:59 UTC", "InactiveExitTimestampMonotonic": "346815440", "InvocationID": "eeb2fa61f8b54f868da3a23f33de5528", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14001", "LimitNPROCSoft": "14001", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14001", "LimitSIGPENDINGSoft": "14001", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "9649", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3202187264", "MemoryCurrent": "34340864", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "36028416", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": 
"0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-08-03 16:44:00 UTC", "StateChangeTimestampMonotonic": "347701858", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22402", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.347) 0:01:33.573 ******* ok: [sut] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } 
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.025) 0:01:33.599 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.018) 0:01:33.617 ******* skipping: [sut] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 03 August 2024 16:44:06 +0000 (0:00:00.017) 0:01:33.634 ******* failed: [sut] (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } MSG: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' failed: [sut] (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } MSG: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' TASK [Debug] ******************************************************************* task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:187 Saturday 03 August 2024 16:44:07 +0000 (0:00:00.569) 0:01:34.204 ******* ok: [sut] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.366680", "end": "2024-08-03 16:44:07.876226", "rc": 0, "start": "2024-08-03 16:44:07.509546" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet + : TASK [Get journald] ************************************************************ task path: /WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:197 Saturday 03 August 2024 16:44:07 +0000 (0:00:00.559) 0:01:34.763 ******* fatal: [sut]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.026869", "end": "2024-08-03 16:44:08.098592", "failed_when_result": true, "rc": 0, "start": "2024-08-03 16:44:08.071723" } STDOUT: Aug 03 16:38:29 localhost systemd[1]: Starting systemd-random-seed.service - Load/Save OS Random Seed... Aug 03 16:38:29 localhost kernel: device-mapper: uevent: version 1.0.3 Aug 03 16:38:29 localhost systemd[1]: systemd-tpm2-setup.service - TPM SRK Setup was skipped because of an unmet condition check (ConditionSecurity=measured-uki). Aug 03 16:38:29 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. 
Aug 03 16:38:29 localhost kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@lists.linux.dev Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@fuse.service - Load Kernel Module fuse. Aug 03 16:38:29 localhost systemd[1]: modprobe@dm_mod.service: Deactivated successfully. Aug 03 16:38:29 localhost systemd[1]: Finished modprobe@dm_mod.service - Load Kernel Module dm_mod. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-network-generator.service - Generate network units from Kernel command line. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-udev-load-credentials.service - Load udev Rules from Credentials. Aug 03 16:38:29 localhost systemd[1]: systemd-repart.service - Repartition Root Disk was skipped because no trigger condition checks were met. Aug 03 16:38:29 localhost systemd-journald[476]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. Aug 03 16:38:29 localhost systemd-journald[476]: Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.8M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Aug 03 16:38:29 localhost systemd[1]: Queued start job for default target multi-user.target. Aug 03 16:38:29 localhost systemd[1]: systemd-journald.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. Aug 03 16:38:29 localhost systemd[1]: Started systemd-journald.service - Journal Service. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-sysctl.service - Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 160. Aug 03 16:38:29 localhost systemd[1]: Starting systemd-journal-flush.service - Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 195. Aug 03 16:38:29 localhost systemd[1]: Finished systemd-udev-trigger.service - Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 175. 
Aug 03 16:38:29 localhost systemd[1]: Finished systemd-random-seed.service - Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 193. Aug 03 16:38:29 localhost systemd-journald[476]: Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is 8M, max 70.5M, 62.5M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/ec255fe88ae7744a0ac9c75566afb243) is currently using 8M. ░░ Maximum allowed usage is set to 70.5M. ░░ Leaving at least 35.2M free (of currently available 689.8M of disk space). ░░ Enforced usage limit is thus 70.5M, of which 62.5M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Aug 03 16:38:30 localhost systemd-journald[476]: Received client request to flush runtime journal. Aug 03 16:38:30 localhost systemd[1]: Finished systemd-journal-flush.service - Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 195. Aug 03 16:38:30 localhost systemd[1]: Mounting sys-fs-fuse-connections.mount - FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 158. Aug 03 16:38:30 localhost systemd[1]: Mounted sys-fs-fuse-connections.mount - FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 158. Aug 03 16:38:30 localhost systemd[1]: Finished lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling. ░░ Subject: A start job for unit lvm2-monitor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-monitor.service has finished successfully. ░░ ░░ The job identifier is 147. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev-early.service - Create Static Device Nodes in /dev gracefully. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev-early.service has finished successfully. ░░ ░░ The job identifier is 131. Aug 03 16:38:31 localhost systemd[1]: systemd-sysusers.service - Create System Users was skipped because no trigger condition checks were met. 
░░ Subject: A start job for unit systemd-sysusers.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysusers.service has finished successfully. ░░ ░░ The job identifier is 137. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 128. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 128. Aug 03 16:38:31 localhost systemd[1]: Reached target local-fs-pre.target - Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 129. Aug 03 16:38:31 localhost systemd[1]: Reached target local-fs.target - Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 138. Aug 03 16:38:31 localhost systemd[1]: Listening on systemd-bootctl.socket - Boot Entries Service Socket. ░░ Subject: A start job for unit systemd-bootctl.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-bootctl.socket has finished successfully. ░░ ░░ The job identifier is 220. Aug 03 16:38:31 localhost systemd[1]: Listening on systemd-sysext.socket - System Extension Image Management. ░░ Subject: A start job for unit systemd-sysext.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.socket has finished successfully. ░░ ░░ The job identifier is 212. Aug 03 16:38:31 localhost systemd[1]: ldconfig.service - Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 167. Aug 03 16:38:31 localhost systemd[1]: selinux-autorelabel-mark.service - Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 161. Aug 03 16:38:31 localhost systemd[1]: systemd-binfmt.service - Set Up Additional Binary Formats was skipped because no trigger condition checks were met. 
░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 188. Aug 03 16:38:31 localhost systemd[1]: systemd-boot-random-seed.service - Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 136. Aug 03 16:38:31 localhost systemd[1]: systemd-confext.service - Merge System Configuration Images into /etc/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-confext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-confext.service has finished successfully. ░░ ░░ The job identifier is 177. Aug 03 16:38:31 localhost systemd[1]: systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/ was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysext.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysext.service has finished successfully. ░░ ░░ The job identifier is 191. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-tmpfiles-setup.service - Create System Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 149. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-udevd.service - Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 141. Aug 03 16:38:31 localhost systemd-udevd[517]: Using default interface naming scheme 'rhel-10.0'. Aug 03 16:38:31 localhost systemd[1]: Finished systemd-tmpfiles-setup.service - Create System Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 149. Aug 03 16:38:31 localhost systemd[1]: Mounting var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 262. Aug 03 16:38:31 localhost systemd[1]: Starting audit-rules.service - Load Audit Rules... ░░ Subject: A start job for unit audit-rules.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has begun execution. ░░ ░░ The job identifier is 231. 
Aug 03 16:38:31 localhost systemd[1]: systemd-firstboot.service - First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit systemd-firstboot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-firstboot.service has finished successfully. ░░ ░░ The job identifier is 133. Aug 03 16:38:31 localhost systemd[1]: first-boot-complete.target - First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 134. Aug 03 16:38:31 localhost systemd[1]: systemd-journal-catalog-update.service - Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 163. Aug 03 16:38:31 localhost systemd[1]: Starting systemd-machine-id-commit.service - Save Transient machine-id to Disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 168. Aug 03 16:38:31 localhost systemd[1]: systemd-update-done.service - Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 185. Aug 03 16:38:31 localhost kernel: RPC: Registered named UNIX socket transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered udp transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp-with-tls transport module. Aug 03 16:38:31 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Aug 03 16:38:31 localhost systemd[1]: Mounted var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 262. Aug 03 16:38:31 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 261. Aug 03 16:38:31 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. 
Aug 03 16:38:31 localhost systemd[1]: Finished systemd-machine-id-commit.service - Save Transient machine-id to Disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 168. Aug 03 16:38:32 localhost augenrules[521]: /sbin/augenrules: No change Aug 03 16:38:32 localhost augenrules[537]: No rules Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost augenrules[537]: enabled 0 Aug 03 16:38:32 localhost augenrules[537]: failure 1 Aug 03 16:38:32 localhost augenrules[537]: pid 0 Aug 03 16:38:32 localhost augenrules[537]: rate_limit 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_limit 8192 Aug 03 16:38:32 localhost augenrules[537]: lost 0 Aug 03 16:38:32 localhost augenrules[537]: backlog 0 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time 60000 Aug 03 16:38:32 localhost augenrules[537]: backlog_wait_time_actual 0 Aug 03 16:38:32 localhost systemd[1]: audit-rules.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit audit-rules.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Finished audit-rules.service - Load Audit Rules. ░░ Subject: A start job for unit audit-rules.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has finished successfully. ░░ ░░ The job identifier is 231. Aug 03 16:38:32 localhost systemd[1]: Starting auditd.service - Security Audit Logging Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 230. Aug 03 16:38:32 localhost systemd[1]: Started systemd-udevd.service - Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 141. Aug 03 16:38:32 localhost systemd[1]: Starting modprobe@configfs.service - Load Kernel Module configfs... 
░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 288. Aug 03 16:38:32 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Finished modprobe@configfs.service - Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 288. Aug 03 16:38:32 localhost systemd[1]: Condition check resulted in dev-ttyS0.device - /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 227. Aug 03 16:38:32 localhost 55-scsi-sg3_id.rules[573]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:32 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Aug 03 16:38:32 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Aug 03 16:38:32 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 294. Aug 03 16:38:32 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Aug 03 16:38:32 localhost auditd[580]: No plugins found, not dispatching events Aug 03 16:38:32 localhost auditd[580]: Init complete, auditd 4.0 listening for events (startup state enable) Aug 03 16:38:32 localhost systemd[1]: Started auditd.service - Security Audit Logging Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 230. Aug 03 16:38:32 localhost systemd[1]: Starting systemd-update-utmp.service - Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 251. Aug 03 16:38:32 localhost (udev-worker)[555]: Network interface NamePolicy= disabled on kernel command line. 
Aug 03 16:38:32 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Aug 03 16:38:32 localhost kernel: Console: switching to colour dummy device 80x25 Aug 03 16:38:32 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Aug 03 16:38:32 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Aug 03 16:38:32 localhost kernel: Console: switching to colour frame buffer device 128x48 Aug 03 16:38:32 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Aug 03 16:38:32 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Stopped systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 294 and the job result is done. Aug 03 16:38:32 localhost systemd[1]: run-credentials-systemd\x2dvconsole\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dvconsole\x2dsetup.service.mount has successfully entered the 'dead' state. Aug 03 16:38:32 localhost systemd[1]: Starting systemd-vconsole-setup.service - Virtual Console Setup... ░░ Subject: A start job for unit systemd-vconsole-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has begun execution. ░░ ░░ The job identifier is 294. Aug 03 16:38:32 localhost systemd[1]: Finished systemd-update-utmp.service - Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 251. Aug 03 16:38:33 localhost systemd[1]: Finished systemd-vconsole-setup.service - Virtual Console Setup. ░░ Subject: A start job for unit systemd-vconsole-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-vconsole-setup.service has finished successfully. ░░ ░░ The job identifier is 294. Aug 03 16:38:33 localhost systemd[1]: Reached target sysinit.target - System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 120. Aug 03 16:38:33 localhost systemd[1]: Started dnf-makecache.timer - dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 198. Aug 03 16:38:33 localhost systemd[1]: Started fstrim.timer - Discard unused filesystem blocks once a week. 
░░ Subject: A start job for unit fstrim.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.timer has finished successfully. ░░ ░░ The job identifier is 206. Aug 03 16:38:33 localhost systemd[1]: Started logrotate.timer - Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 207. Aug 03 16:38:33 localhost systemd[1]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 205. Aug 03 16:38:33 localhost systemd[1]: Started unbound-anchor.timer - daily update of the root trust anchor for DNSSEC. ░░ Subject: A start job for unit unbound-anchor.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.timer has finished successfully. ░░ ░░ The job identifier is 208. Aug 03 16:38:33 localhost systemd[1]: Reached target timers.target - Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 197. Aug 03 16:38:33 localhost systemd[1]: Listening on dbus.socket - D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 202. Aug 03 16:38:33 localhost systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 216. Aug 03 16:38:33 localhost systemd[1]: Listening on sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 219. Aug 03 16:38:33 localhost systemd[1]: Listening on systemd-hostnamed.socket - Hostname Service Socket. ░░ Subject: A start job for unit systemd-hostnamed.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.socket has finished successfully. ░░ ░░ The job identifier is 215. Aug 03 16:38:33 localhost systemd[1]: Reached target sockets.target - Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 210. Aug 03 16:38:33 localhost systemd[1]: Starting dbus-broker.service - D-Bus System Message Bus... 
░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 203. Aug 03 16:38:33 localhost systemd[1]: systemd-pcrphase-sysinit.service - TPM PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 132. Aug 03 16:38:33 localhost systemd[1]: Started dbus-broker.service - D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 203. Aug 03 16:38:33 localhost systemd[1]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 119. Aug 03 16:38:33 localhost dbus-broker-launch[602]: Ready Aug 03 16:38:34 localhost systemd[1]: Starting chronyd.service - NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 247. Aug 03 16:38:34 localhost systemd[1]: Starting cloud-init-local.service - Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 234. Aug 03 16:38:34 localhost systemd[1]: Starting dracut-shutdown.service - Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 184. Aug 03 16:38:34 localhost systemd[1]: Started irqbalance.service - irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 232. Aug 03 16:38:34 localhost (qbalance)[607]: irqbalance.service: Referenced but unset environment variable evaluates to an empty string: IRQBALANCE_ARGS Aug 03 16:38:34 localhost systemd[1]: Started rngd.service - Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 269. Aug 03 16:38:34 localhost systemd[1]: Starting rsyslog.service - System Logging Service... 
░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 268. Aug 03 16:38:34 localhost systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 237. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 241. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 239. Aug 03 16:38:34 localhost systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 242. Aug 03 16:38:34 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 238. Aug 03 16:38:34 localhost systemd[1]: sssd.service - System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 270. Aug 03 16:38:34 localhost systemd[1]: Reached target nss-user-lookup.target - User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 271. Aug 03 16:38:34 localhost systemd[1]: Starting systemd-logind.service - User Login Management... 
░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 252. Aug 03 16:38:34 localhost systemd-logind[612]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event0 (Power Button) Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event1 (Sleep Button) Aug 03 16:38:34 localhost systemd-logind[612]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Aug 03 16:38:34 localhost systemd[1]: Starting unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound... ░░ Subject: A start job for unit unbound-anchor.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.service has begun execution. ░░ ░░ The job identifier is 303. Aug 03 16:38:34 localhost systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 252. Aug 03 16:38:34 localhost systemd[1]: Finished dracut-shutdown.service - Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 184. Aug 03 16:38:34 localhost rsyslogd[611]: imjournal: filecreatemode is not set, using default 0644 [v8.2312.0-2.el10 try https://www.rsyslog.com/e/2186 ] Aug 03 16:38:34 localhost rsyslogd[611]: [origin software="rsyslogd" swVersion="8.2312.0-2.el10" x-pid="611" x-info="https://www.rsyslog.com"] start Aug 03 16:38:34 localhost systemd[1]: Started rsyslog.service - System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 268. Aug 03 16:38:34 localhost rsyslogd[611]: imjournal: journal files changed, reloading... 
[v8.2312.0-2.el10 try https://www.rsyslog.com/e/0 ] Aug 03 16:38:34 localhost rngd[610]: Disabling 7: PKCS11 Entropy generator (pkcs11) Aug 03 16:38:34 localhost rngd[610]: Disabling 5: NIST Network Entropy Beacon (nist) Aug 03 16:38:34 localhost rngd[610]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Aug 03 16:38:34 localhost rngd[610]: Disabling 10: Named pipe entropy input (namedpipe) Aug 03 16:38:34 localhost rngd[610]: Initializing available sources Aug 03 16:38:34 localhost rngd[610]: [hwrng ]: Initialization Failed Aug 03 16:38:34 localhost rngd[610]: [rdrand]: Enabling RDRAND rng support Aug 03 16:38:34 localhost rngd[610]: [rdrand]: Initialized Aug 03 16:38:34 localhost rngd[610]: [jitter]: JITTER timeout set to 5 sec Aug 03 16:38:34 localhost rngd[610]: [jitter]: Initializing AES buffer Aug 03 16:38:34 localhost chronyd[631]: chronyd version 4.5 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Aug 03 16:38:34 localhost chronyd[631]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Aug 03 16:38:34 localhost systemd[1]: unbound-anchor.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit unbound-anchor.service has successfully entered the 'dead' state. Aug 03 16:38:34 localhost systemd[1]: Finished unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound. ░░ Subject: A start job for unit unbound-anchor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit unbound-anchor.service has finished successfully. ░░ ░░ The job identifier is 303. Aug 03 16:38:35 localhost chronyd[631]: Using right/UTC timezone to obtain leap second data Aug 03 16:38:35 localhost chronyd[631]: Loaded seccomp filter (level 2) Aug 03 16:38:35 localhost systemd[1]: Started chronyd.service - NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 247. Aug 03 16:38:39 localhost rngd[610]: [jitter]: Unable to obtain AES key, disabling JITTER source Aug 03 16:38:39 localhost rngd[610]: [jitter]: Initialization Failed Aug 03 16:38:39 localhost rngd[610]: Process privileges have been dropped to 2:2 Aug 03 16:38:41 localhost cloud-init[637]: Cloud-init v. 24.1.4-14.el10 running 'init-local' at Sat, 03 Aug 2024 16:38:41 +0000. Up 28.83 seconds. Aug 03 16:38:42 localhost dhcpcd[639]: dhcpcd-10.0.6 starting Aug 03 16:38:42 localhost kernel: 8021q: 802.1Q VLAN Support v1.8 Aug 03 16:38:42 localhost systemd[1]: Listening on systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 382. 
Aug 03 16:38:42 localhost kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database Aug 03 16:38:42 localhost kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7' Aug 03 16:38:42 localhost kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600' Aug 03 16:38:42 localhost dhcpcd[642]: DUID 00:01:00:01:2e:41:19:92:0a:ff:ca:de:37:19 Aug 03 16:38:42 localhost dhcpcd[642]: eth0: IAID ca:de:37:19 Aug 03 16:38:43 localhost kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2 Aug 03 16:38:43 localhost kernel: cfg80211: failed to load regulatory.db Aug 03 16:38:43 localhost dhcpcd[642]: eth0: soliciting a DHCP lease Aug 03 16:38:44 localhost dhcpcd[642]: eth0: offered 10.31.15.127 from 10.31.12.1 Aug 03 16:38:44 localhost dhcpcd[642]: eth0: leased 10.31.15.127 for 3600 seconds Aug 03 16:38:44 localhost dhcpcd[642]: eth0: adding route to 10.31.12.0/22 Aug 03 16:38:44 localhost dhcpcd[642]: eth0: adding default route via 10.31.12.1 Aug 03 16:38:44 localhost dhcpcd[642]: control command: /usr/sbin/dhcpcd --dumplease --ipv4only eth0 Aug 03 16:38:44 localhost systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 391. Aug 03 16:38:44 localhost systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 391. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-hostnamed[661]: Hostname set to (static) Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init-local.service - Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 234. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network-pre.target - Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 173. Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager.service - Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 201. 
Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 0 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 0 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 48 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 48 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 49 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 49 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 50 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 50 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 51 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 51 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 52 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 52 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 53 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 53 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 54 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 54 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 55 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 55 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 56 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 56 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 57 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 57 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 58 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 58 affinity is now unmanaged Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: Cannot change IRQ 59 affinity: Input/output error Aug 03 16:38:44 ip-10-31-15-127.us-east-1.aws.redhat.com irqbalance[607]: IRQ 59 affinity is now unmanaged Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.2148] NetworkManager (version 1.48.4-1.el10.1) is starting... 
(boot:f918ba53-6683-4114-ab94-acdfb1690bf8) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.2150] Read config: /etc/NetworkManager/NetworkManager.conf (etc: 30-cloud-init-ip6-addr-gen-mode.conf) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3033] manager[0x55b947d06a10]: monitoring kernel firmware directory '/lib/firmware'. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3069] hostname: hostname: using hostnamed Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3069] hostname: static hostname changed from (none) to "ip-10-31-15-127.us-east-1.aws.redhat.com" Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3073] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3077] manager[0x55b947d06a10]: rfkill: Wi-Fi hardware radio set enabled Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3077] manager[0x55b947d06a10]: rfkill: WWAN hardware radio set enabled Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3111] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3112] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3112] manager: Networking is enabled by state file Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3131] settings: Loaded settings plugin: keyfile (internal) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3189] dhcp: init: Using DHCP client 'internal' Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3194] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 468. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3219] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3231] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3237] device (lo): Activation: starting connection 'lo' (b29afc40-5a84-42e4-9b3d-3192ff420e5d) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3246] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3250] device (eth0): state change: unmanaged -> unavailable (reason 'managed', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager.service - Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 201. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3282] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network.target - Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 204. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3287] device (lo): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3289] device (lo): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3291] device (lo): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3292] device (eth0): carrier: link connected Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3294] device (lo): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3298] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3302] policy: auto-activating connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3318] device (eth0): Activation: starting connection 'cloud-init eth0' (1dd9a779-d327-56e1-8454-c65e2556c12c) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3321] device (eth0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3324] manager: NetworkManager state is now CONNECTING Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3329] device (eth0): state change: prepare -> config (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3337] device (eth0): state change: config -> ip-config (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3342] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.3356] dhcp4 (eth0): state changed new lease, address=10.31.15.127, acd pending Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting NetworkManager-wait-online.service - Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 200. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting gssproxy.service - GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 264. 
Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.4614] dhcp4 (eth0): state changed new lease, address=10.31.15.127 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.4619] policy: set 'cloud-init eth0' (eth0) as default for IPv4 routing and DNS Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.5524] device (eth0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started gssproxy.service - GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 264. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: rpc-gssd.service - RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 260. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target nfs-client.target - NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 257. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs-pre.target - Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 265. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-cryptsetup.target - Remote Encrypted Volumes. ░░ Subject: A start job for unit remote-cryptsetup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-cryptsetup.target has finished successfully. ░░ ░░ The job identifier is 246. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target remote-fs.target - Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 256. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-pcrphase.service - TPM PCR Barrier (User) was skipped because of an unmet condition check (ConditionSecurity=measured-uki). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 186. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 468. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7986] device (lo): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7991] device (lo): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.7995] device (lo): Activation: successful, device activated. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8001] device (eth0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8002] device (eth0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'managed') Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8005] manager: NetworkManager state is now CONNECTED_SITE Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8007] device (eth0): Activation: successful, device activated. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8012] manager: NetworkManager state is now CONNECTED_GLOBAL Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com NetworkManager[668]: [1722703125.8021] manager: startup complete Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished NetworkManager-wait-online.service - Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 200. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-init.service - Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 235. Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.11.160.238 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.18.100.10 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.2.32.37 Aug 03 16:38:45 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Added source 10.2.32.38 Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Cloud-init v. 24.1.4-14.el10 running 'init' at Sat, 03 Aug 2024 16:38:46 +0000. Up 33.27 seconds. 
Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | eth0 | True | 10.31.15.127 | 255.255.252.0 | global | 0a:ff:ca:de:37:19 | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | eth0 | True | fe80::8ff:caff:fede:3719/64 | . | link | 0a:ff:ca:de:37:19 | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | lo | True | ::1/128 | . | host | . | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 0 | 0.0.0.0 | 10.31.12.1 | 0.0.0.0 | eth0 | UG | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 1 | 10.31.12.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | Route | Destination | Gateway | Interface | Flags | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 0 | fe80::/64 | :: | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: | 2 | multicast | :: | eth0 | U | Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: ci-info: +-------+-------------+---------+-----------+-------+ Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: 2024-08-03 16:38:46,245 - handlers[WARNING]: Unhandled non-multipart (text/x-not-multipart) userdata: 
'b'~/.citool.d/post-install'...' Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[819]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:46 ip-10-31-15-127.us-east-1.aws.redhat.com 55-scsi-sg3_id.rules[822]: WARNING: SCSI device xvda has no device ID, consider changing .SCSI_ID_SERIAL_SRC in 00-scsi-sg3_config.rules Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private rsa key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:ZvesGiprDsEtUfJUwK3IbWOK393/5+Op2ojKDN75/FU root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +---[RSA 3072]----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..++. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | =. . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..o.. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .oo* | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .++.. S . E | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: |. .o o . o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | ..... .. + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | .o+=.+.o = o. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o+oBo=+*o+=+. | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private ecdsa key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:jE4mUbXoU7JTJPc3vhFSSUGyU5Fge2eMsO0gNuq8XGQ root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +---[ECDSA 256]---+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o.+ =+B= | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . = + Xoo | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . o B O B + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o X o O = | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . 
X E + | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | B = o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | + . . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Generating public/private ed25519 key pair. Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key fingerprint is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: SHA256:MtJQ4JxB9EQd79wj6M4U+A+GFz9cH4BXaq8JsHFvXD4 root@ip-10-31-15-127.us-east-1.aws.redhat.com Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: The key's randomart image is: Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +--[ED25519 256]--+ Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o=o+... . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o * .. . o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | = . o + = . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o . O * = | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . = S = B E | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . * = = = o | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | . B + o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | * o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: | o . | Aug 03 16:38:47 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[753]: +----[SHA256]-----+ Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-init.service - Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 235. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-config.target - Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 244. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target network-online.target - Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 199. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-config.service - Apply the settings specified in cloud-config... 
░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 243. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting kdump.service - Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 272. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting restraintd.service - The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 266. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting rpc-statd-notify.service - Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 258. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 236. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com (sshd)[848]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sm-notify[838]: Version 2.6.4 starting Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started rpc-statd-notify.service - Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 258. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started restraintd.service - The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 266. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Server listening on 0.0.0.0 port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Server listening on :: port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 236. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[867]: Cloud-init v. 24.1.4-14.el10 running 'modules:config' at Sat, 03 Aug 2024 16:38:48 +0000. Up 35.58 seconds. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopping sshd.service - OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[848]: Received signal 15; terminating. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopped sshd.service - OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 571 and the job result is done. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 654 and the job result is done. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 654. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: ssh-host-keys-migration.service - Update OpenSSH host key permissions was skipped because of an unmet condition check (ConditionPathExists=!/var/lib/.ssh-host-keys-migration). ░░ Subject: A start job for unit ssh-host-keys-migration.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ssh-host-keys-migration.service has finished successfully. ░░ ░░ The job identifier is 648. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 652. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 650. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 653. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 654. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting sshd.service - OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com (sshd)[874]: sshd.service: Referenced but unset environment variable evaluates to an empty string: OPTIONS Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[874]: Server listening on 0.0.0.0 port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com sshd[874]: Server listening on :: port 22. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started sshd.service - OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 571. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-config.service - Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 243. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting cloud-final.service - Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 245. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-user-sessions.service - Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 267. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished systemd-user-sessions.service - Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 267. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started crond.service - Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 229. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started getty@tty1.service - Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 222. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started serial-getty@ttyS0.service - Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 226. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target getty.target - Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 221. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target multi-user.target - Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 118. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 250. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) STARTUP (1.7.0) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (Syslog will be used instead of sendmail.) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 7% if used.) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com crond[891]: (CRON) INFO (running with inotify support) Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 250. 
Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[898]: Cloud-init v. 24.1.4-14.el10 running 'modules:final' at Sat, 03 Aug 2024 16:38:48 +0000. Up 36.06 seconds. Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[900]: ############################################################# Aug 03 16:38:48 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[901]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[903]: 256 SHA256:jE4mUbXoU7JTJPc3vhFSSUGyU5Fge2eMsO0gNuq8XGQ root@ip-10-31-15-127.us-east-1.aws.redhat.com (ECDSA) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[905]: 256 SHA256:MtJQ4JxB9EQd79wj6M4U+A+GFz9cH4BXaq8JsHFvXD4 root@ip-10-31-15-127.us-east-1.aws.redhat.com (ED25519) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[907]: 3072 SHA256:ZvesGiprDsEtUfJUwK3IbWOK393/5+Op2ojKDN75/FU root@ip-10-31-15-127.us-east-1.aws.redhat.com (RSA) Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[908]: -----END SSH HOST KEY FINGERPRINTS----- Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[909]: ############################################################# Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com restraintd[852]: Listening on http://localhost:8081 Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com cloud-init[898]: Cloud-init v. 24.1.4-14.el10 finished at Sat, 03 Aug 2024 16:38:49 +0000. Datasource DataSourceEc2Local. Up 36.17 seconds Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished cloud-final.service - Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 245. Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Reached target cloud-init.target - Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 233. Aug 03 16:38:49 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Detected change(s) in the following file(s): /etc/fstab Aug 03 16:38:52 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: Selected source 10.2.32.38 Aug 03 16:38:52 ip-10-31-15-127.us-east-1.aws.redhat.com chronyd[631]: System clock TAI offset set to 37 seconds Aug 03 16:38:54 ip-10-31-15-127.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Aug 03 16:38:54 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Rebuilding /boot/initramfs-6.10.0-15.el10.x86_64kdump.img Aug 03 16:38:55 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1384]: dracut-101-2.el10 Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/8605af15-4596-4ba0-84aa-95550e824316 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.10.0-15.el10.x86_64kdump.img 6.10.0-15.el10.x86_64 Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Aug 03 16:38:56 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'plymouth' will not be installed, because it's in the list to be omitted! 
Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'resume' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand' could not be found! 
Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Aug 03 16:38:57 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: systemd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: systemd-initrd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: rngd *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: i18n *** Aug 03 16:38:58 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: drm *** Aug 03 16:38:59 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: prefixdevname *** Aug 03 16:38:59 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kernel-modules *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kernel-modules-extra *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: fstab-sys *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: rootfs-block *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: 
terminfo *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: udev-rules *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: dracut-systemd *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: usrmount *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: base *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: fs-lib *** Aug 03 16:39:00 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: kdumpbase *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: memstrack *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: microcode_ctl-fw_dir_override *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl module: mangling fw_dir Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: intel: caveats check for kernel version "6.10.0-15.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: configuration "intel-06-4f-01" is ignored Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: shutdown *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including module: squash *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Including modules done *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Installing kernel module dependencies *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Installing kernel module dependencies done *** Aug 03 16:39:01 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Resolving executable dependencies *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Resolving executable dependencies done *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Hardlinking files *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Mode: real Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Method: sha256 Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Files: 440 Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Linked: 1 files Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Compared: 0 xattrs Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Compared: 11 files Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Saved: 60.55 KiB Aug 03 16:39:03 
ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Duration: 0.008510 seconds Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Hardlinking files done *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Generating early-microcode cpio image *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Constructing GenuineIntel.bin *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Constructing GenuineIntel.bin *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Store current command line parameters *** Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: Stored kernel commandline: Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: No dracut internal kernel commandline stored in the initramfs Aug 03 16:39:03 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Install squash loader *** Aug 03 16:39:04 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Stripping files *** Aug 03 16:39:05 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Stripping files done *** Aug 03 16:39:05 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Squashing the files inside the initramfs *** Aug 03 16:39:10 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Squashing the files inside the initramfs done *** Aug 03 16:39:10 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Creating image file '/boot/initramfs-6.10.0-15.el10.x86_64kdump.img' *** Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com dracut[1387]: *** Creating initramfs image file '/boot/initramfs-6.10.0-15.el10.x86_64kdump.img' done *** Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: kexec: loaded kdump kernel Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com kdumpctl[841]: kdump: Starting kdump: [OK] Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 272. Aug 03 16:39:11 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Startup finished in 990ms (kernel) + 6.157s (initrd) + 51.405s (userspace) = 58.552s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 990036 microseconds. ░░ ░░ Initrd start-up required 6157085 microseconds. ░░ ░░ Userspace start-up required 51405179 microseconds. Aug 03 16:39:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. 
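Note: the dracut run captured above rebuilds the kdump initramfs for kernel 6.10.0-15.el10.x86_64; kdumpctl drives this rebuild itself when kdump.service starts, which is what the surrounding journal entries show. A minimal Ansible sketch of the same rebuild is given below; the dracut options and image path are copied from the logged command line (the --mount argument is omitted here for brevity), while the task name and become setting are illustrative only.

  - name: Rebuild the kdump initramfs (options copied from the dracut call logged above)
    become: true
    ansible.builtin.command:
      cmd: >-
        dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline
        --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip
        -o "plymouth resume ifcfg earlykdump" --squash-compressor zstd
        --no-hostonly-default-device
        -f /boot/initramfs-6.10.0-15.el10.x86_64kdump.img 6.10.0-15.el10.x86_64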
Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: Accepted publickey for root from 10.30.34.46 port 48996 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3733) opened. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 741. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 664. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 3733. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 664. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 743. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 2 of user root. ░░ Subject: A new session 2 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 2 has been created for the user root. ░░ ░░ The leading process of the session is 3738. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com (systemd)[3738]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Queued start job for default target default.target. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 6. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Reached target default.target - Main User Target. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 743. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[3738]: Startup finished in 194ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 194709 microseconds. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 823. Aug 03 16:40:08 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3749]: Received disconnect from 10.30.34.46 port 48996:11: disconnected by user Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3749]: Disconnected from user root 10.30.34.46 port 48996 Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3733) opened. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3733]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 1 logged out. Waiting for processes to exit. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Aug 03 16:40:09 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3781]: Connection closed by 10.29.163.172 port 59608 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: Accepted publickey for root from 10.29.163.172 port 59620 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3782) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 3 of user root. 
░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 3782. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 904. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3785]: Received disconnect from 10.29.163.172 port 59620:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3785]: Disconnected from user root 10.29.163.172 port 59620 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3782) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3782]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 3 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: Accepted publickey for root from 10.29.163.172 port 59632 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3804) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 3804. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 985. 
Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3807]: Received disconnect from 10.29.163.172 port 59632:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3807]: Disconnected from user root 10.29.163.172 port 59632 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3804) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3804]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 4 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: Accepted publickey for root from 10.29.163.172 port 59638 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3826) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 3826. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-5.scope - Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1066. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3829]: Received disconnect from 10.29.163.172 port 59638:11: disconnected by user Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3829]: Disconnected from user root 10.29.163.172 port 59638 Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3826) opened. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3826]: pam_unix(sshd:session): session closed for user root Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. 
Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Session 5 logged out. Waiting for processes to exit. Aug 03 16:40:15 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: Removed session 5. ░░ Subject: Session 5 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 5 has been terminated. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: Accepted publickey for root from 10.29.163.172 port 59642 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-3848) opened. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-logind[612]: New session 6 of user root. ░░ Subject: A new session 6 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user root. ░░ ░░ The leading process of the session is 3848. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started session-6.scope - Session 6 of User root. ░░ Subject: A start job for unit session-6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-6.scope has finished successfully. ░░ ░░ The job identifier is 1147. Aug 03 16:40:16 ip-10-31-15-127.us-east-1.aws.redhat.com sshd-session[3848]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Aug 03 16:40:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[3940]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:40:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[3985]: ansible-service_facts Invoked Aug 03 16:40:33 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4103]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:34 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4124]: ansible-stat Invoked with path=/etc/yum.repos.d/qa-tools.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4142]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4161]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False 
update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:40:35 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4180]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:36 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4200]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:37 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4228]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:39 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4256]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:39 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4276]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4296]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4314]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Linux-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4332]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Stream-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:40 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4350]: ansible-file Invoked with path=/etc/yum.repos.d/beaker-client-testing.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None 
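Note: the two command invocations above pull the internal Red Hat root CA bundle with curl and then refresh the system trust store with update-ca-trust. A module-based sketch of the same two steps follows; using get_url instead of curl is my substitution, while the URL and anchor path are taken from the log.

  - name: Fetch the internal root CA bundle into the trust anchors directory
    ansible.builtin.get_url:
      url: https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem
      dest: /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem
      validate_certs: false   # mirrors the -k in the logged curl call
  - name: Refresh the consolidated system trust store
    ansible.builtin.command:
      cmd: update-ca-trust extract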
modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:41 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4368]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:42 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4388]: ansible-replace Invoked with path=/etc/yum.repos.d/beaker-client.repo regexp=7|\$releasever replace=8 backup=False encoding=utf-8 unsafe_writes=False after=None before=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:42 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4406]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4426]: ansible-community.general.ini_file Invoked with path=/etc/yum.repos.d/beaker-client.repo section=beaker-client option=skip_if_unavailable value=1 backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False values=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4444]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4470]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:43 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4489]: ansible-stat Invoked with path=/etc/yum.repos.d/baseos-ci.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:40:44 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4507]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:44 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4533]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4552]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None 
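Note: the ansible-replace and ini_file entries above edit /etc/yum.repos.d/beaker-client.repo in place, rewriting the release token to 8 and setting skip_if_unavailable=1 in the [beaker-client] section. Reconstructed as a task list from the logged parameters; the task names are mine.

  - name: Point beaker-client.repo at release 8
    ansible.builtin.replace:
      path: /etc/yum.repos.d/beaker-client.repo
      regexp: '7|\$releasever'
      replace: '8'
  - name: Do not fail transactions when the beaker-client repo is unreachable
    community.general.ini_file:
      path: /etc/yum.repos.d/beaker-client.repo
      section: beaker-client
      option: skip_if_unavailable
      value: "1"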
removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4578]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:45 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4597]: ansible-ansible.legacy.command Invoked with _raw_params=yum repolist --enablerepo '*' | grep -q rhel-buildroot _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:46 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4618]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:46 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4644]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4663]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4689]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:47 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4708]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:48 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4727]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:40:58 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4803]: ansible-ansible.legacy.dnf Invoked with name=['createrepo'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:01 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service - /usr/bin/systemctl start man-db-cache-update. 
░░ Subject: A start job for unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has finished successfully. ░░ ░░ The job identifier is 1228. Aug 03 16:41:01 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1305. Aug 03 16:41:02 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4830]: ansible-ansible.legacy.dnf Invoked with name=['make'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:02 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4849]: ansible-ansible.legacy.dnf Invoked with name=['parted'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4868]: ansible-ansible.legacy.dnf Invoked with name=['psmisc'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4887]: ansible-ansible.legacy.dnf Invoked with name=['chrony'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. 
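Note: the dnf invocations above bring the basic provisioning dependencies up to date one package at a time. The same effect can be expressed as a single task; the package list is taken from the logged invocations, everything else is illustrative.

  - name: Ensure the provisioning dependencies are at their latest versions
    ansible.builtin.dnf:
      name:
        - make
        - parted
        - psmisc
        - chrony
      state: latest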
Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1305. Aug 03 16:41:03 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r7add66fa0cd14ff4bbdb8d7e1f59c760.service has successfully entered the 'dead' state. Aug 03 16:41:04 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4910]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:08 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4929]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources | grep redhat | grep -v util.phx2.redhat || chronyc add server clock.corp.redhat.com iburst" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:19 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4951]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4970]: ansible-ansible.legacy.command Invoked with _raw_params=chronyc waitsync 5 _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com python3[4989]: ansible-ansible.legacy.command Invoked with _raw_params=setenforce 0; timedatectl set-timezone UTC; setenforce 1 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-timedated.service - Time & Date Service... ░░ Subject: A start job for unit systemd-timedated.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has begun execution. ░░ ░░ The job identifier is 1382. Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started systemd-timedated.service - Time & Date Service. ░░ Subject: A start job for unit systemd-timedated.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has finished successfully. ░░ ░░ The job identifier is 1382. Aug 03 16:41:26 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-timedated[4993]: Changed time zone to 'UTC' (UTC). ░░ Subject: Time zone change to UTC ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system timezone has been changed to UTC. 
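Note: the chrony block above checks the configured sources, adds clock.corp.redhat.com when no internal source is present, waits at most five tries for synchronisation, and finally switches the time zone to UTC (the log wraps timedatectl in setenforce 0/1). A hedged task-list equivalent follows; the timezone module is my substitution for the logged timedatectl call.

  - name: Add the internal NTP server if chrony has no Red Hat source yet
    ansible.builtin.shell:
      cmd: chronyc sources | grep redhat | grep -v util.phx2.redhat || chronyc add server clock.corp.redhat.com iburst
  - name: Wait for chrony to synchronise (at most five tries)
    ansible.builtin.command:
      cmd: chronyc waitsync 5
  - name: Switch the system time zone to UTC
    community.general.timezone:
      name: UTC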
Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5011]: ansible-yum_repository Invoked with state=present name=beaker-tasks description=Beaker tasks baseurl=['http://beaker.engineering.redhat.com/rpms/'] gpgcheck=False enabled=True reposdir=/etc/yum.repos.d unsafe_writes=False bandwidth=None cost=None deltarpm_metadata_percentage=None deltarpm_percentage=None enablegroups=None exclude=None failovermethod=None file=None gpgcakey=None gpgkey=None module_hotfixes=None http_caching=None include=None includepkgs=None ip_resolve=None keepalive=None keepcache=None metadata_expire=None metadata_expire_filter=None metalink=None mirrorlist=None mirrorlist_expire=None params=None password=NOT_LOGGING_PARAMETER priority=None protect=None proxy=None proxy_password=NOT_LOGGING_PARAMETER proxy_username=None repo_gpgcheck=None retries=None s3_enabled=None skip_if_unavailable=None sslcacert=None ssl_check_cert_permissions=None sslclientcert=None sslclientkey=None sslverify=None throttle=None timeout=None ui_repoid_vars=None username=None async=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5029]: ansible-user Invoked with name=root update_password=always password=NOT_LOGGING_PARAMETER state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on ip-10-31-15-127.us-east-1.aws.redhat.com uid=None group=None groups=None comment=None home=None shell=None login_class=None password_expire_max=None password_expire_min=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Aug 03 16:41:27 ip-10-31-15-127.us-east-1.aws.redhat.com usermod[5032]: change user 'root' password Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5054]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config regexp=#?PasswordAuthentication (?:yes|no) line=PasswordAuthentication yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5072]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config line=PermitRootLogin yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5090]: ansible-file Invoked with path=/var/lib/tft state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:28 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5108]: ansible-stat Invoked with path=/var/lib/tft/lib.sh follow=False get_md5=False get_checksum=True 
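Note: the yum_repository and lineinfile invocations above define the beaker-tasks repository and open sshd up for password and root logins. The corresponding tasks, with parameters taken directly from the journal entries (no sshd reload is shown in the log, so none is added here):

  - name: Define the beaker-tasks repository
    ansible.builtin.yum_repository:
      name: beaker-tasks
      description: Beaker tasks
      baseurl: http://beaker.engineering.redhat.com/rpms/
      gpgcheck: false
      enabled: true
  - name: Allow password authentication over ssh
    ansible.builtin.lineinfile:
      path: /etc/ssh/sshd_config
      regexp: '#?PasswordAuthentication (?:yes|no)'
      line: PasswordAuthentication yes
  - name: Allow direct root logins over ssh
    ansible.builtin.lineinfile:
      path: /etc/ssh/sshd_config
      line: PermitRootLogin yes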
get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5166]: ansible-ansible.legacy.stat Invoked with path=/var/lib/tft/lib.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5219]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703288.7922935-9143-9606793578585/source dest=/var/lib/tft/lib.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=lib.sh.j2 checksum=af779f2a149bc6695c9d2d1622342e81371886ab backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5255]: ansible-stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com rsyslogd[611]: imjournal: journal files changed, reloading... [v8.2312.0-2.el10 try https://www.rsyslog.com/e/0 ] Aug 03 16:41:29 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5314]: ansible-ansible.legacy.stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5367]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703289.6370099-9156-125064816134732/source dest=/usr/local/bin/ci-extendtesttime.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-extendtesttime.sh.j2 checksum=2d0f6c296873c17e8b22c9490b000973b2a8a350 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5403]: ansible-stat Invoked with path=/usr/bin/extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5421]: ansible-stat Invoked with path=/usr/local/bin/ci-return2pool.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:30 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5479]: ansible-ansible.legacy.stat Invoked with path=/usr/local/bin/ci-return2pool.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5532]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1722703290.6154213-9176-210523413906836/source dest=/usr/local/bin/ci-return2pool.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-return2pool.sh.j2 checksum=f77cae3b2a729c60bcb0c01c82bf347f13a9b2c5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5568]: ansible-stat Invoked with path=/etc/motd follow=False get_md5=False 
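Note: the copy entries above install rendered helper scripts (lib.sh, ci-extendtesttime.sh, ci-return2pool.sh) as root-only executables with mode u=rx,go=. Because the _original_basename values end in .j2, the play presumably renders templates; a sketch for one helper follows, with the source template name treated as an assumption inferred from that basename.

  - name: Install the test-time extension helper as a root-only executable
    ansible.builtin.template:
      src: ci-extendtesttime.sh.j2   # assumed source name, inferred from _original_basename in the log
      dest: /usr/local/bin/ci-extendtesttime.sh
      owner: root
      group: root
      mode: "u=rx,go="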
get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5628]: ansible-ansible.legacy.stat Invoked with path=/root/.guest-metadata.json follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:31 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5681]: ansible-ansible.legacy.copy Invoked with dest=/root/.guest-metadata.json owner=root group=root mode=u=rw,go= src=/root/.ansible/tmp/ansible-tmp-1722703291.3756824-9202-125095011241297/source _original_basename=tmpmial_d_c follow=False checksum=bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5717]: ansible-ansible.legacy.command Invoked with _raw_params=curl -sLI --connect-timeout 5 -w '%{response_code}' http://169.254.169.254/latest/meta-data/instance-id | grep ^200 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com python3[5738]: ansible-hostname Invoked with name=721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm use=None Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 1459. Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 1459. Aug 03 16:41:32 ip-10-31-15-127.us-east-1.aws.redhat.com systemd-hostnamed[5742]: Changed pretty hostname to '721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm' Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd-hostnamed[5742]: Hostname set to <721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm> (static) Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm NetworkManager[668]: [1722703292.6855] hostname: static hostname changed from "ip-10-31-15-127.us-east-1.aws.redhat.com" to "721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm" Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1536. Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1536. Aug 03 16:41:32 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5772]: ansible-ansible.legacy.command Invoked with _raw_params=ping -4 -c1 -W10 $(hostname) _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5792]: ansible-file Invoked with path=/var/log/messages state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5810]: ansible-ansible.legacy.command Invoked with _raw_params=setsebool nis_enabled on _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[5811]: The nis_enabled policy boolean was changed to on by root Aug 03 16:41:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5829]: ansible-stat Invoked with path=/usr/bin/rstrnt-package follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5849]: ansible-ansible.legacy.command Invoked with _raw_params=sed -e 's/rpm -q $package/rpm -q --whatprovides $package/' -i /usr/bin/rstrnt-package _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5868]: ansible-stat Invoked with path=/var/lib/restraint follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5888]: ansible-file Invoked with path=/var/lib/restraint/avc_since state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5906]: ansible-stat Invoked with path=/usr/share/beakerlib/beakerlib.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5926]: ansible-file Invoked with dest=/usr/lib/beakerlib state=directory path=/usr/lib/beakerlib recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 
721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5944]: ansible-file Invoked with dest=/usr/share/rhts-library state=directory path=/usr/share/rhts-library recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5962]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/lib/beakerlib/beakerlib.sh state=link path=/usr/lib/beakerlib/beakerlib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5980]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/share/rhts-library/rhtslib.sh state=link path=/usr/share/rhts-library/rhtslib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:35 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[5998]: ansible-ansible.legacy.command Invoked with _raw_params=mv /var/log/audit/audit.log /var/log/audit/audit.log.bak _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6017]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/sbin/service auditd restart _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[580]: The audit daemon is exiting. Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703296.152:732): op=set audit_pid=0 old=580 auid=4294967295 ses=4294967295 subj=system_u:system_r:auditd_t:s0 res=1 Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: auditd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit auditd.service has successfully entered the 'dead' state. Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1131 audit(1722703296.156:733): pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=auditd comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success' Aug 03 16:41:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=2310 audit(1722703296.156:734): pid=603 uid=81 auid=4294967295 ses=4294967295 subj=system_u:system_r:system_dbusd_t:s0-s0:c0.c1023 msg='avc: op=load_policy lsm=selinux seqno=2 res=1 exe="/usr/bin/dbus-broker" sauid=81 hostname=? addr=? terminal=?' 
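Editor's note: the ansible-file calls above create the beakerlib compatibility symlinks one invocation at a time. A minimal task sketch that would produce equivalent module calls (the loop form and task name are editorial assumptions; the module, paths, and state=link come straight from the log) might look like:

- name: Link beakerlib into its legacy locations   # sketch; src/dest/state taken from the ansible-file calls above
  ansible.builtin.file:
    src: /usr/share/beakerlib/beakerlib.sh
    dest: "{{ item }}"
    state: link
  loop:
    - /usr/lib/beakerlib/beakerlib.sh
    - /usr/share/rhts-library/rhtslib.sh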
Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting audit-rules.service - Load Audit Rules... ░░ Subject: A start job for unit audit-rules.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has begun execution. ░░ ░░ The job identifier is 1615. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6027]: /sbin/augenrules: No change Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:735): op=set audit_backlog_limit=8192 old=8192 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: No rules Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 3 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 7 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: enabled 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: failure 1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: pid 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: rate_limit 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_limit 8192 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: lost 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog 11 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time 60000 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm augenrules[6037]: backlog_wait_time_actual 0 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: 
type=1300 audit(1722703297.189:735): arch=c000003e syscall=44 success=yes exit=60 a0=3 a1=7ffc90be7840 a2=3c a3=0 items=0 ppid=6027 pid=6037 auid=4294967295 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=(none) ses=4294967295 comm="auditctl" exe="/usr/sbin/auditctl" subj=system_u:system_r:unconfined_service_t:s0 key=(null) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1327 audit(1722703297.189:735): proctitle=2F7362696E2F617564697463746C002D52002F6574632F61756469742F61756469742E72756C6573 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:736): op=set audit_failure=1 old=1 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1300 audit(1722703297.189:736): arch=c000003e syscall=44 success=yes exit=60 a0=3 a1=7ffc90be7840 a2=3c a3=0 items=0 ppid=6027 pid=6037 auid=4294967295 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=(none) ses=4294967295 comm="auditctl" exe="/usr/sbin/auditctl" subj=system_u:system_r:unconfined_service_t:s0 key=(null) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1327 audit(1722703297.189:736): proctitle=2F7362696E2F617564697463746C002D52002F6574632F61756469742F61756469742E72756C6573 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: audit: type=1305 audit(1722703297.189:737): op=set audit_backlog_wait_time=60000 old=60000 auid=4294967295 ses=4294967295 subj=system_u:system_r:unconfined_service_t:s0 res=1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: audit-rules.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit audit-rules.service has successfully entered the 'dead' state. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished audit-rules.service - Load Audit Rules. ░░ Subject: A start job for unit audit-rules.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit audit-rules.service has finished successfully. ░░ ░░ The job identifier is 1615. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting auditd.service - Security Audit Logging Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 1614. Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[6046]: No plugins found, not dispatching events Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm auditd[6046]: Init complete, auditd 4.0 listening for events (startup state enable) Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started auditd.service - Security Audit Logging Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 1614. 
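Editor's note: the audit-log rotation that produced the journal entries above is driven by two plain command tasks, an mv followed by the legacy service wrapper (presumably because auditd refuses a manual stop via systemctl). A hedged sketch mirroring the _raw_params values logged earlier:

- name: Rotate the audit log before the test run   # mirrors the mv command seen in the journal
  ansible.builtin.command: mv /var/log/audit/audit.log /var/log/audit/audit.log.bak

- name: Restart auditd so it reopens its log file   # service wrapper used rather than systemctl, as in the log
  ansible.builtin.command: /usr/sbin/service auditd restart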
Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6067]: ansible-stat Invoked with path=/etc/NetworkManager/conf.d follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6127]: ansible-ansible.legacy.stat Invoked with path=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Aug 03 16:41:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6180]: ansible-ansible.legacy.copy Invoked with dest=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf src=/root/.ansible/tmp/ansible-tmp-1722703297.5009277-9344-116220217129658/source _original_basename=tmpa81l80ed follow=False checksum=0145f3ae57eef5aa08bbb678fedbb3edd001cd2d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:41:38 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6216]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:38 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6242]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6261]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6280]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
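Editor's note: the copy invocation above drops a DNS workaround into NetworkManager's conf.d; the payload itself is not logged (content=NOT_LOGGING_PARAMETER). A sketch of such a task, with the source file left as a hypothetical placeholder, could be:

- name: Install the tft DNS workaround for NetworkManager   # sketch; destination taken from the log
  ansible.builtin.copy:
    src: 99-zzz-tft-workaround-dns-default.conf   # hypothetical local file; the real content is not logged
    dest: /etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf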
Aug 03 16:41:50 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6394]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:41:51 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6439]: ansible-service_facts Invoked Aug 03 16:41:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6556]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6577]: ansible-ansible.legacy.command Invoked with _raw_params=if [ $(find /etc/yum.repos.d/ -name 'epel.repo' | wc -l ) -gt 0 ]; then dnf config-manager --set-disabled epel; fi _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6599]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6618]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:41:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: systemd-timedated.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-timedated.service has successfully entered the 'dead' state. Aug 03 16:42:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. 
Aug 03 16:42:06 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6737]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6782]: ansible-service_facts Invoked Aug 03 16:42:09 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6900]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:10 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[6958]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7003]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:42:12 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7022]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Aug 03 16:42:12 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7041]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:13 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7061]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:14 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7089]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:15 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7117]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:15 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7137]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v 
https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:16 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7157]: ansible-service_facts Invoked Aug 03 16:42:18 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7275]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:18 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7296]: ansible-service_facts Invoked Aug 03 16:42:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7414]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa | sort _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7435]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:21 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7461]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:21 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7480]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:22 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3[7499]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:33 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7612]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Aug 03 16:42:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7659]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:42:34 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7677]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:42:37 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7700]: 
ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[602]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. 
Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7708 ('systemctl') (unit session-6.scope)... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 169 ms. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started run-rc4a75727ad5043baaa3a7236ce1063c8.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has finished successfully. ░░ ░░ The job identifier is 1625. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1702. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7761 ('systemctl') (unit session-6.scope)... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 205 ms. Aug 03 16:42:39 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Queuing reload/restart jobs for marked units… Aug 03 16:42:40 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7840]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:40 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7858]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7876]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 7879 ('systemctl') (unit session-6.scope)... Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 180 ms. Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... 
░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 1779. Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm (rtmonger)[7927]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Aug 03 16:42:41 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 1779. Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[7969]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 
721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7987]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
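Editor's note: the fedora.linux_system_roles.certificate_request call above, and the certmonger activity that follows it, correspond to a self-signed request for quadlet_demo. A minimal sketch of role variables that would drive such a request (inferred from the logged module parameters; the role itself supplies defaults such as the /etc/pki/tls directory and the certmonger provider) is:

- name: Request the demo certificate via the certificate role   # sketch inferred from the certificate_request call above
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.certificate
  vars:
    certificate_requests:
      - name: quadlet_demo
        dns: localhost
        ca: self-sign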
Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:42 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8005]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Aug 03 16:42:42 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8023]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8041]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8059]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm certmonger[7927]: 2024-08-03 16:42:43 [7927] Wrote to /var/lib/certmonger/requests/20240803164242 Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8078]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:43 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8096]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8114]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1702. 
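Editor's note: the slurp, getcert stop-tracking, and file state=absent invocations above tear the test certificate back down. A hedged sketch of the equivalent cleanup tasks, using only the command and paths visible in the log:

- name: Stop tracking the test certificate   # mirrors the getcert command above
  ansible.builtin.command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

- name: Remove the issued certificate and key   # loop form is an editorial assumption
  ansible.builtin.file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key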
Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8132]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:42:44 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: run-rc4a75727ad5043baaa3a7236ce1063c8.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rc4a75727ad5043baaa3a7236ce1063c8.service has successfully entered the 'dead' state. Aug 03 16:42:45 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[8172]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... 
Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:11 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 407 SID table entries... Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:19 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[8230]: The virt_use_nfs policy boolean was changed to 1 by root Aug 03 16:43:20 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm setsebool[8230]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 409 SID table entries... 
Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:28 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: Converting 409 SID table entries... Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability open_perms=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability always_check_network=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability ioctl_skip_cloexec=0 Aug 03 16:43:36 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: SELinux: policy capability userspace_initial_context=0 Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has finished successfully. ░░ ░░ The job identifier is 1857. Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1934. Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 9065 ('systemctl') (unit session-6.scope)... Aug 03 16:43:53 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... 
Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 276 ms. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Queuing reload/restart jobs for marked units… Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm dbus-broker-launch[9126]: Ready Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reload requested from client PID 9125 ('systemctl')... Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reloading... Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Reloading finished in 63 ms. Aug 03 16:43:54 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[3738]: Queuing reload/restart jobs for marked units… Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1934. Aug 03 16:43:55 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r223c3e6d9ec443f1b7a3c5ab9689cd6e.service has successfully entered the 'dead' state. 
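Editor's note: the dnf invocation earlier in this stretch (name=['iptables-nft', 'podman', 'shadow-utils-subid']) is presumably what triggers the repeated SELinux policy reloads and the man-db cache updates above. A sketch of that install task, using only the package list and state from the log:

- name: Install podman and its networking/subid prerequisites   # package list taken from the dnf call in the log
  ansible.builtin.dnf:
    name:
      - iptables-nft
      - podman
      - shadow-utils-subid
    state: present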
Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9435]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9460]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9479]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None Aug 03 16:43:56 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9498]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:57 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9518]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:58 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9536]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:43:58 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9554]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9573]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9593]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reload requested from client PID 9596 ('systemctl') (unit session-6.scope)... Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading... Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Reloading finished in 178 ms. Aug 03 16:43:59 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 2012. Aug 03 16:44:00 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. 
░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 2012. Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9678]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 2095. Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Started polkitd version 124 Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Loading rules from directory /etc/polkit-1/rules.d Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Loading rules from directory /usr/share/polkit-1/rules.d Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Finished loading, compiling and executing 2 rules Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 2095. 
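Note: the firewall_lib invocation above asks for 8000/tcp to be opened in both the runtime and the permanent configuration (permanent=True, runtime=True, state=enabled). The role's module drives firewalld through its own API; the sketch below is only an illustrative CLI equivalent using firewall-cmd, assuming firewalld is running and the default zone is the target. The permanent half is what gets written under /etc/firewalld/zones/ and is the step that fails in the entries that follow.

# Illustrative sketch: runtime + permanent port opening, roughly what the
# role requested above (not the module's actual code path).
import subprocess

def open_port(port_proto: str) -> None:
    # Runtime change: takes effect immediately, lost on reload/reboot.
    subprocess.run(["firewall-cmd", f"--add-port={port_proto}"], check=True)
    # Permanent change: persisted under /etc/firewalld/zones/.
    subprocess.run(["firewall-cmd", "--permanent", f"--add-port={port_proto}"], check=True)

if __name__ == "__main__":
    for p in ("8000/tcp", "9000/tcp"):
        open_port(p)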
Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm polkitd[9709]: Acquired the name org.freedesktop.PolicyKit1 on the system bus Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9735]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:01 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9753]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9772]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm 
systemd[1]: var-lib-containers-storage-overlay-compat2389640300-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2389640300-merged.mount has successfully entered the 'dead' state. Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm kernel: evm: overlay not supported Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm podman[9773]: 2024-08-03 16:44:02.264770264 +0000 UTC m=+0.078701254 system refresh Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9799]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9825]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:02 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9846]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:03 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
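Note: the PermissionError above (and again when the firewall role is retried below) is raised by firewalld itself while backing up and rewriting /etc/firewalld/zones/public.xml. Since firewalld runs as root, an EACCES here usually points at unexpected ownership, an immutable attribute, or an SELinux label on that file or its directory rather than at the Ansible role; the log alone cannot confirm which, so the Python sketch below is a diagnostic checklist, not a conclusion.

# Diagnostic sketch for the firewalld EACCES above; adjust paths as needed.
import os
import stat
import subprocess

ZONE_FILE = "/etc/firewalld/zones/public.xml"

def inspect(path: str) -> None:
    st = os.stat(path)
    print(path, oct(stat.S_IMODE(st.st_mode)), st.st_uid, st.st_gid)
    # SELinux label and file attributes, if the tools are present on the host.
    subprocess.run(["ls", "-ldZ", path])
    subprocess.run(["lsattr", "-d", path])

if __name__ == "__main__":
    inspect(os.path.dirname(ZONE_FILE))
    if os.path.exists(ZONE_FILE):
        inspect(ZONE_FILE)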
Aug 03 16:44:03 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9883]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9907]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None Aug 03 16:44:04 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9926]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Aug 03 16:44:05 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9946]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Aug 03 16:44:06 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9965]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Aug 03 16:44:06 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[9985]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Aug 03 16:44:06 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[10005]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:07 
721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[10023]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Aug 03 16:44:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: ERROR: Backup of file '/etc/firewalld/zones/public.xml' failed: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml.old' Aug 03 16:44:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm firewalld[9649]: Traceback (most recent call last): File "/usr/lib/python3.12/site-packages/firewall/server/decorators.py", line 57, in _impl return func(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/server/config_zone.py", line 280, in update2 self.obj = self.config.set_zone_config_dict(self.obj, settings) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/site-packages/firewall/core/fw_config.py", line 811, in set_zone_config_dict zone_writer(x) File "/usr/lib/python3.12/site-packages/firewall/core/io/zone.py", line 506, in zone_writer f = io.open(name, mode="wt", encoding="UTF-8") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PermissionError: [Errno 13] Permission denied: '/etc/firewalld/zones/public.xml' Aug 03 16:44:07 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[10041]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Aug 03 16:44:08 721d47bb-e415-4081-a54d-27dbd2c3bd29.testing-farm python3.12[10066]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None to retry, use: --limit @/tmp/tests_quadlet_demo.retry PLAY RECAP ********************************************************************* sut : ok=74 changed=9 unreachable=0 failed=2 skipped=71 rescued=2 ignored=0 Saturday 03 August 2024 16:44:08 +0000 (0:00:00.233) 0:01:34.997 ******* =============================================================================== fedora.linux_system_roles.podman : Ensure required packages are installed -- 70.79s /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 3.33s /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.71s /WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.45s 
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.10s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Gathering Facts --------------------------------------------------------- 0.92s
/WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:9 -----------------
fedora.linux_system_roles.certificate : Ensure provider service is running --- 0.78s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.certificate : Ensure certificate requests ----- 0.71s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.68s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.certificate : Remove files -------------------- 0.67s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
fedora.linux_system_roles.firewall : Install firewalld ------------------ 0.63s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 0.62s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.firewall : Install firewalld ------------------ 0.59s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.firewall : Configure firewall ----------------- 0.57s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Debug ------------------------------------------------------------------- 0.56s
/WORKDIR/git-weekly-cig48sz_fx/tests/tests_quadlet_demo.yml:187 ---------------
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.55s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.firewall : Unmask firewalld service ----------- 0.36s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
fedora.linux_system_roles.firewall : Unmask firewalld service ----------- 0.35s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 0.35s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.podman : Get user information ----------------- 0.29s
/WORKDIR/git-weekly-cig48sz_fx/.collection/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
---^---^---^---^---^---
# STDERR: ---v---v---v---v---v---
[DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead.
This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[WARNING]: Platform linux on host sut is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
---^---^---^---^---^---
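Note: neither stderr message is fatal to the run. The first asks for the singular ANSIBLE_COLLECTIONS_PATH environment variable (or collections_path in ansible.cfg) instead of the deprecated plural form, and the second can be quieted by pinning ansible_python_interpreter for the host. Below is a hedged sketch of a wrapper that relaunches the playbook with the corrected variable; the paths are the ones from this particular run and the interpreter override is an illustrative extra-vars choice, not something the test harness does.

# Illustrative wrapper: export the non-deprecated variable name before
# invoking ansible-playbook. Paths are taken from this run's log.
import os
import subprocess

env = dict(os.environ)
# Singular form, as requested by the deprecation warning above.
env.pop("ANSIBLE_COLLECTIONS_PATHS", None)
env["ANSIBLE_COLLECTIONS_PATH"] = "/WORKDIR/git-weekly-cig48sz_fx/.collection"

subprocess.run(
    ["ansible-playbook", "tests/tests_quadlet_demo.yml",
     # Pinning the interpreter avoids the discovery warning (illustrative override).
     "-e", "ansible_python_interpreter=/usr/bin/python3.12"],
    cwd="/WORKDIR/git-weekly-cig48sz_fx",
    env=env,
    check=False,
)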